Browse Source
* initial implementation * next iteration * clean up * cleanup * fix test * cleanup * rename services, refactor, fix test logic * fix DI * update comment * update comment * resolve merge conflict * register dependencies out of jslib, more cleanup * fix failing test in build * fix type error * try fixing test error * remove async from test setup * attempt to fix test * fix text * fix test * add ts-expect-error comment * cleanup --------- Co-authored-by: bnagawiecki <107435978+bnagawiecki@users.noreply.github.com>pull/555/head
13 changed files with 515 additions and 131 deletions
@ -0,0 +1,6 @@ |
|||||||
|
import { DirectoryType } from "@/src/enums/directoryType"; |
||||||
|
import { IDirectoryService } from "@/src/services/directory.service"; |
||||||
|
|
||||||
|
/**
 * Abstract factory for obtaining a directory service implementation.
 * Consumers ask for the IDirectoryService matching a given DirectoryType;
 * a concrete factory (e.g. DefaultDirectoryFactoryService) supplies it.
 */
export abstract class DirectoryFactoryService {
  /** Creates the IDirectoryService implementation for the given directory type. */
  abstract createService(type: DirectoryType): IDirectoryService;
}
||||||
@ -0,0 +1,13 @@ |
|||||||
|
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest"; |
||||||
|
|
||||||
|
import { GroupEntry } from "@/src/models/groupEntry"; |
||||||
|
import { UserEntry } from "@/src/models/userEntry"; |
||||||
|
|
||||||
|
/**
 * Abstract contract for converting synced directory entries into one or more
 * OrganizationImportRequest payloads destined for the /import endpoint.
 *
 * NOTE(review): buildRequest is declared as a function-typed property rather
 * than an abstract method, so it is undefined at runtime on this base class
 * until an implementation assigns/overrides it — confirm this shape is
 * intentional (e.g. for use as a DI token).
 */
export abstract class RequestBuilder {
  // groups/users: entries to import.
  // removeDisabled: when true, disabled users are reported as deleted.
  // overwriteExisting: forwarded to the request; replaces existing org entries.
  buildRequest: (
    groups: GroupEntry[],
    users: UserEntry[],
    removeDisabled: boolean,
    overwriteExisting: boolean,
  ) => OrganizationImportRequest[];
}
||||||
@ -0,0 +1,70 @@ |
|||||||
|
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest"; |
||||||
|
|
||||||
|
import { GroupEntry } from "@/src/models/groupEntry"; |
||||||
|
import { UserEntry } from "@/src/models/userEntry"; |
||||||
|
|
||||||
|
import { RequestBuilder } from "../abstractions/request-builder.service"; |
||||||
|
|
||||||
|
import { batchSize } from "./sync.service"; |
||||||
|
|
||||||
|
/** |
||||||
|
* This class is responsible for batching large sync requests (>2k users) into multiple smaller |
||||||
|
* requests to the /import endpoint. This is done to ensure we are under the default |
||||||
|
* maximum packet size for NGINX web servers to avoid the request potentially timing out |
||||||
|
* */ |
||||||
|
export class BatchRequestBuilder implements RequestBuilder { |
||||||
|
buildRequest( |
||||||
|
groups: GroupEntry[], |
||||||
|
users: UserEntry[], |
||||||
|
removeDisabled: boolean, |
||||||
|
overwriteExisting: boolean, |
||||||
|
): OrganizationImportRequest[] { |
||||||
|
const requests: OrganizationImportRequest[] = []; |
||||||
|
|
||||||
|
if (users.length > 0) { |
||||||
|
const usersRequest = users.map((u) => { |
||||||
|
return { |
||||||
|
email: u.email, |
||||||
|
externalId: u.externalId, |
||||||
|
deleted: u.deleted || (removeDisabled && u.disabled), |
||||||
|
}; |
||||||
|
}); |
||||||
|
|
||||||
|
// Partition users
|
||||||
|
for (let i = 0; i < usersRequest.length; i += batchSize) { |
||||||
|
const u = usersRequest.slice(i, i + batchSize); |
||||||
|
const req = new OrganizationImportRequest({ |
||||||
|
groups: [], |
||||||
|
users: u, |
||||||
|
largeImport: true, |
||||||
|
overwriteExisting, |
||||||
|
}); |
||||||
|
requests.push(req); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
if (groups.length > 0) { |
||||||
|
const groupRequest = groups.map((g) => { |
||||||
|
return { |
||||||
|
name: g.name, |
||||||
|
externalId: g.externalId, |
||||||
|
memberExternalIds: Array.from(g.userMemberExternalIds), |
||||||
|
}; |
||||||
|
}); |
||||||
|
|
||||||
|
// Partition groups
|
||||||
|
for (let i = 0; i < groupRequest.length; i += batchSize) { |
||||||
|
const g = groupRequest.slice(i, i + batchSize); |
||||||
|
const req = new OrganizationImportRequest({ |
||||||
|
groups: g, |
||||||
|
users: [], |
||||||
|
largeImport: true, |
||||||
|
overwriteExisting, |
||||||
|
}); |
||||||
|
requests.push(req); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
return requests; |
||||||
|
} |
||||||
|
} |
||||||
@ -0,0 +1,47 @@ |
|||||||
|
import { GroupEntry } from "@/src/models/groupEntry"; |
||||||
|
import { UserEntry } from "@/src/models/userEntry"; |
||||||
|
|
||||||
|
import { BatchRequestBuilder } from "./batch-request-builder"; |
||||||
|
import { SingleRequestBuilder } from "./single-request-builder"; |
||||||
|
|
||||||
|
describe("BatchRequestBuilder", () => { |
||||||
|
let batchRequestBuilder: BatchRequestBuilder; |
||||||
|
let singleRequestBuilder: SingleRequestBuilder; |
||||||
|
|
||||||
|
function userSimulator(userCount: number) { |
||||||
|
return Array(userCount).fill(new UserEntry()); |
||||||
|
} |
||||||
|
|
||||||
|
function groupSimulator(groupCount: number) { |
||||||
|
return Array(groupCount).fill(new GroupEntry()); |
||||||
|
} |
||||||
|
|
||||||
|
beforeEach(async () => { |
||||||
|
batchRequestBuilder = new BatchRequestBuilder(); |
||||||
|
singleRequestBuilder = new SingleRequestBuilder(); |
||||||
|
}); |
||||||
|
|
||||||
|
it("BatchRequestBuilder batches requests for > 2000 users", () => { |
||||||
|
const mockGroups = groupSimulator(11000); |
||||||
|
const mockUsers = userSimulator(11000); |
||||||
|
|
||||||
|
const requests = batchRequestBuilder.buildRequest(mockGroups, mockUsers, true, true); |
||||||
|
|
||||||
|
expect(requests.length).toEqual(12); |
||||||
|
}); |
||||||
|
|
||||||
|
it("SingleRequestBuilder returns single request for 200 users", () => { |
||||||
|
const mockGroups = groupSimulator(200); |
||||||
|
const mockUsers = userSimulator(200); |
||||||
|
|
||||||
|
const requests = singleRequestBuilder.buildRequest(mockGroups, mockUsers, true, true); |
||||||
|
|
||||||
|
expect(requests.length).toEqual(1); |
||||||
|
}); |
||||||
|
|
||||||
|
it("BatchRequestBuilder retuns an empty array when there are no users or groups", () => { |
||||||
|
const requests = batchRequestBuilder.buildRequest([], [], true, true); |
||||||
|
|
||||||
|
expect(requests).toEqual([]); |
||||||
|
}); |
||||||
|
}); |
||||||
@ -0,0 +1,37 @@ |
|||||||
|
import { I18nService } from "@/jslib/common/src/abstractions/i18n.service"; |
||||||
|
import { LogService } from "@/jslib/common/src/abstractions/log.service"; |
||||||
|
|
||||||
|
import { DirectoryFactoryService } from "../abstractions/directory-factory.service"; |
||||||
|
import { StateService } from "../abstractions/state.service"; |
||||||
|
import { DirectoryType } from "../enums/directoryType"; |
||||||
|
|
||||||
|
import { AzureDirectoryService } from "./azure-directory.service"; |
||||||
|
import { GSuiteDirectoryService } from "./gsuite-directory.service"; |
||||||
|
import { LdapDirectoryService } from "./ldap-directory.service"; |
||||||
|
import { OktaDirectoryService } from "./okta-directory.service"; |
||||||
|
import { OneLoginDirectoryService } from "./onelogin-directory.service"; |
||||||
|
|
||||||
|
export class DefaultDirectoryFactoryService implements DirectoryFactoryService { |
||||||
|
constructor( |
||||||
|
private logService: LogService, |
||||||
|
private i18nService: I18nService, |
||||||
|
private stateService: StateService, |
||||||
|
) {} |
||||||
|
|
||||||
|
createService(directoryType: DirectoryType) { |
||||||
|
switch (directoryType) { |
||||||
|
case DirectoryType.GSuite: |
||||||
|
return new GSuiteDirectoryService(this.logService, this.i18nService, this.stateService); |
||||||
|
case DirectoryType.AzureActiveDirectory: |
||||||
|
return new AzureDirectoryService(this.logService, this.i18nService, this.stateService); |
||||||
|
case DirectoryType.Ldap: |
||||||
|
return new LdapDirectoryService(this.logService, this.i18nService, this.stateService); |
||||||
|
case DirectoryType.Okta: |
||||||
|
return new OktaDirectoryService(this.logService, this.i18nService, this.stateService); |
||||||
|
case DirectoryType.OneLogin: |
||||||
|
return new OneLoginDirectoryService(this.logService, this.i18nService, this.stateService); |
||||||
|
default: |
||||||
|
throw new Error("Invalid Directory Type"); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
@ -0,0 +1,42 @@ |
|||||||
|
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest"; |
||||||
|
|
||||||
|
import { GroupEntry } from "@/src/models/groupEntry"; |
||||||
|
import { UserEntry } from "@/src/models/userEntry"; |
||||||
|
|
||||||
|
import { RequestBuilder } from "../abstractions/request-builder.service"; |
||||||
|
|
||||||
|
/** |
||||||
|
* This class is responsible for building small (<2k users) syncs as a single |
||||||
|
* request to the /import endpoint. This is done to be backwards compatible with |
||||||
|
* existing functionality for sync requests that are sufficiently small enough to not |
||||||
|
* exceed default maximum packet size limits on NGINX web servers. |
||||||
|
* */ |
||||||
|
export class SingleRequestBuilder implements RequestBuilder { |
||||||
|
buildRequest( |
||||||
|
groups: GroupEntry[], |
||||||
|
users: UserEntry[], |
||||||
|
removeDisabled: boolean, |
||||||
|
overwriteExisting: boolean, |
||||||
|
): OrganizationImportRequest[] { |
||||||
|
return [ |
||||||
|
new OrganizationImportRequest({ |
||||||
|
groups: (groups ?? []).map((g) => { |
||||||
|
return { |
||||||
|
name: g.name, |
||||||
|
externalId: g.externalId, |
||||||
|
memberExternalIds: Array.from(g.userMemberExternalIds), |
||||||
|
}; |
||||||
|
}), |
||||||
|
users: (users ?? []).map((u) => { |
||||||
|
return { |
||||||
|
email: u.email, |
||||||
|
externalId: u.externalId, |
||||||
|
deleted: u.deleted || (removeDisabled && u.disabled), |
||||||
|
}; |
||||||
|
}), |
||||||
|
overwriteExisting: overwriteExisting, |
||||||
|
largeImport: false, |
||||||
|
}), |
||||||
|
]; |
||||||
|
} |
||||||
|
} |
||||||
@ -0,0 +1,132 @@ |
|||||||
|
import { mock, MockProxy } from "jest-mock-extended"; |
||||||
|
|
||||||
|
import { CryptoFunctionService } from "@/jslib/common/src/abstractions/cryptoFunction.service"; |
||||||
|
import { EnvironmentService } from "@/jslib/common/src/abstractions/environment.service"; |
||||||
|
import { MessagingService } from "@/jslib/common/src/abstractions/messaging.service"; |
||||||
|
import { OrganizationImportRequest } from "@/jslib/common/src/models/request/organizationImportRequest"; |
||||||
|
import { ApiService } from "@/jslib/common/src/services/api.service"; |
||||||
|
|
||||||
|
import { DirectoryFactoryService } from "../abstractions/directory-factory.service"; |
||||||
|
import { DirectoryType } from "../enums/directoryType"; |
||||||
|
import { getSyncConfiguration } from "../utils/test-fixtures"; |
||||||
|
|
||||||
|
import { BatchRequestBuilder } from "./batch-request-builder"; |
||||||
|
import { I18nService } from "./i18n.service"; |
||||||
|
import { LdapDirectoryService } from "./ldap-directory.service"; |
||||||
|
import { SingleRequestBuilder } from "./single-request-builder"; |
||||||
|
import { StateService } from "./state.service"; |
||||||
|
import { SyncService } from "./sync.service"; |
||||||
|
import * as constants from "./sync.service"; |
||||||
|
|
||||||
|
import { groupFixtures } from "@/openldap/group-fixtures"; |
||||||
|
import { userFixtures } from "@/openldap/user-fixtures"; |
||||||
|
|
||||||
|
// NOTE(review): these tests are order-dependent — the second test lowers the
// module-level `batchSize` constant and the third restores it. Keep that in
// mind before reordering or running tests in isolation.
describe("SyncService", () => {
  let cryptoFunctionService: MockProxy<CryptoFunctionService>;
  let apiService: MockProxy<ApiService>;
  let messagingService: MockProxy<MessagingService>;
  let i18nService: MockProxy<I18nService>;
  let environmentService: MockProxy<EnvironmentService>;
  let stateService: MockProxy<StateService>;
  let directoryFactory: MockProxy<DirectoryFactoryService>;
  let batchRequestBuilder: MockProxy<BatchRequestBuilder>;
  let singleRequestBuilder: MockProxy<SingleRequestBuilder>;

  let syncService: SyncService;

  beforeEach(() => {
    cryptoFunctionService = mock();
    apiService = mock();
    messagingService = mock();
    i18nService = mock();
    environmentService = mock();
    stateService = mock();
    directoryFactory = mock();
    batchRequestBuilder = mock();
    singleRequestBuilder = mock();

    // Arrange an LDAP directory whose entries come from the openldap fixtures.
    stateService.getDirectoryType.mockResolvedValue(DirectoryType.Ldap);
    stateService.getOrganizationId.mockResolvedValue("fakeId");
    const mockDirectoryService = mock<LdapDirectoryService>();
    mockDirectoryService.getEntries.mockResolvedValue([groupFixtures, userFixtures]);
    directoryFactory.createService.mockReturnValue(mockDirectoryService);

    syncService = new SyncService(
      cryptoFunctionService,
      apiService,
      messagingService,
      i18nService,
      environmentService,
      stateService,
      batchRequestBuilder,
      singleRequestBuilder,
      directoryFactory,
    );
  });

  it("Sync posts single request successfully for unique hashes", async () => {
    stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
    // This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
    stateService.getLastSyncHash.mockResolvedValue("unique hash");

    const mockRequest: OrganizationImportRequest[] = [
      {
        members: [],
        groups: [],
        overwriteExisting: true,
        largeImport: true,
      },
    ];

    singleRequestBuilder.buildRequest.mockReturnValue(mockRequest);

    await syncService.sync(true, false);

    // A single (non-batched) import should produce exactly one API call.
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(1);
  });

  it("Sync posts multiple request successfully for unique hashes", async () => {
    stateService.getSync.mockResolvedValue(
      getSyncConfiguration({ groups: true, users: true, largeImport: true }),
    );
    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
    // This arranges the last hash to be different from the ArrayBuffer after it is converted to b64
    stateService.getLastSyncHash.mockResolvedValue("unique hash");

    // @ts-expect-error This is a workaround to make the batchsize smaller to trigger the batching logic since its a const.
    constants.batchSize = 4;

    const mockRequests = new Array(6).fill({
      members: [],
      groups: [],
      overwriteExisting: true,
      largeImport: true,
    });

    batchRequestBuilder.buildRequest.mockReturnValue(mockRequests);

    await syncService.sync(true, false);

    // Each batched request should be posted individually, in order.
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledTimes(6);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[0]);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[1]);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[2]);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[3]);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[4]);
    expect(apiService.postPublicImportDirectory).toHaveBeenCalledWith(mockRequests[5]);
  });

  it("does not post for the same hash", async () => {
    // @ts-expect-error this sets the batch size back to its expected value for this test.
    constants.batchSize = 2000;
    stateService.getSync.mockResolvedValue(getSyncConfiguration({ groups: true, users: true }));
    cryptoFunctionService.hash.mockResolvedValue(new ArrayBuffer(1));
    // This arranges the last hash to be the same as the ArrayBuffer after it is converted to b64
    stateService.getLastSyncHash.mockResolvedValue("AA==");

    await syncService.sync(true, false);

    // Unchanged hash means nothing to sync; no import request should be sent.
    expect(apiService.postPublicImportDirectory).not.toHaveBeenCalled();
  });
});
||||||
@ -0,0 +1,53 @@ |
|||||||
|
import { LdapConfiguration } from "../models/ldapConfiguration"; |
||||||
|
import { SyncConfiguration } from "../models/syncConfiguration"; |
||||||
|
|
||||||
|
/** |
||||||
|
* @returns a basic ldap configuration without TLS/SSL enabled. Can be overridden by passing in a partial configuration. |
||||||
|
*/ |
||||||
|
export const getLdapConfiguration = (config?: Partial<LdapConfiguration>): LdapConfiguration => ({ |
||||||
|
ssl: false, |
||||||
|
startTls: false, |
||||||
|
tlsCaPath: null, |
||||||
|
sslAllowUnauthorized: false, |
||||||
|
sslCertPath: null, |
||||||
|
sslKeyPath: null, |
||||||
|
sslCaPath: null, |
||||||
|
hostname: "localhost", |
||||||
|
port: 1389, |
||||||
|
domain: null, |
||||||
|
rootPath: "dc=bitwarden,dc=com", |
||||||
|
currentUser: false, |
||||||
|
username: "cn=admin,dc=bitwarden,dc=com", |
||||||
|
password: "admin", |
||||||
|
ad: false, |
||||||
|
pagedSearch: false, |
||||||
|
...(config ?? {}), |
||||||
|
}); |
||||||
|
|
||||||
|
/** |
||||||
|
* @returns a basic sync configuration. Can be overridden by passing in a partial configuration. |
||||||
|
*/ |
||||||
|
export const getSyncConfiguration = (config?: Partial<SyncConfiguration>): SyncConfiguration => ({ |
||||||
|
users: false, |
||||||
|
groups: false, |
||||||
|
interval: 5, |
||||||
|
userFilter: null, |
||||||
|
groupFilter: null, |
||||||
|
removeDisabled: false, |
||||||
|
overwriteExisting: false, |
||||||
|
largeImport: false, |
||||||
|
// Ldap properties
|
||||||
|
groupObjectClass: "posixGroup", |
||||||
|
userObjectClass: "person", |
||||||
|
groupPath: null, |
||||||
|
userPath: null, |
||||||
|
groupNameAttribute: "cn", |
||||||
|
userEmailAttribute: "mail", |
||||||
|
memberAttribute: "memberUid", |
||||||
|
useEmailPrefixSuffix: false, |
||||||
|
emailPrefixAttribute: "sAMAccountName", |
||||||
|
emailSuffix: null, |
||||||
|
creationDateAttribute: "whenCreated", |
||||||
|
revisionDateAttribute: "whenChanged", |
||||||
|
...(config ?? {}), |
||||||
|
}); |
||||||
Loading…
Reference in new issue