-
Notifications
You must be signed in to change notification settings - Fork 2.2k
fix: oas should import requests where possible #9632
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -173,8 +173,12 @@ export async function scanResources(importEntries: ImportEntry[]): Promise<ScanR | |||||
| let v5Error = null; | ||||||
|
|
||||||
| try { | ||||||
| const { data: insomnia5Import, error } = tryImportV5Data(contentStr); | ||||||
| v5Error = error; | ||||||
| let insomnia5Import: ExportedModel[] = []; | ||||||
| if (contentStr.startsWith('type: ')) { | ||||||
| const { data, error } = tryImportV5Data(contentStr); | ||||||
| insomnia5Import = data as ExportedModel[]; | ||||||
| v5Error = error; | ||||||
| } | ||||||
|
Comment on lines
+176
to
+181
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I did this because the error spam was killing my debugging experience.
Comment on lines
+176
to
+181
|
||||||
| if (insomnia5Import.length > 0) { | ||||||
| result = { | ||||||
| type: { | ||||||
|
|
@@ -183,7 +187,6 @@ export async function scanResources(importEntries: ImportEntry[]): Promise<ScanR | |||||
| description: 'Insomnia v5', | ||||||
| }, | ||||||
| data: { | ||||||
| // @ts-expect-error -- TSCONVERSION | ||||||
| resources: insomnia5Import, | ||||||
| }, | ||||||
| }; | ||||||
|
|
@@ -234,6 +237,7 @@ export async function scanResources(importEntries: ImportEntry[]): Promise<ScanR | |||||
| }); | ||||||
|
|
||||||
| const requests = resources.filter(isRequest); | ||||||
| const requestGroups = resources.filter(isRequestGroup); | ||||||
| const websocketRequests = resources.filter(isWebSocketRequest); | ||||||
| const grpcRequests = resources.filter(isGrpcRequest); | ||||||
| const socketIoRequests = resources.filter(isSocketIORequest); | ||||||
|
|
@@ -251,6 +255,7 @@ export async function scanResources(importEntries: ImportEntry[]): Promise<ScanR | |||||
| unitTests, | ||||||
| unitTestSuites, | ||||||
| requests: [...requests, ...websocketRequests, ...grpcRequests, ...socketIoRequests], | ||||||
| requestGroups, | ||||||
|
Comment on lines
240
to
258
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This was most of what was broken about imports before, if I'm not mistaken |
||||||
| workspaces, | ||||||
|
Comment on lines
239
to
259
|
||||||
| environments, | ||||||
| apiSpecs, | ||||||
|
|
@@ -598,21 +603,14 @@ export const importResourcesToNewWorkspace = async ({ | |||||
| const resources = resourceCacheItem.resources; | ||||||
| const ResourceIdMap = new Map(); | ||||||
| let newWorkspace: Workspace; | ||||||
| // in order to support import from api spec yaml | ||||||
| if (resourceCacheItem?.importer?.id && isApiSpecImport(resourceCacheItem.importer)) { | ||||||
| // support import from both insomnia export and api spec yaml | ||||||
| if (resources.find(isApiSpec) || isApiSpecImport(resourceCacheItem.importer)) { | ||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This makes the next code support both v5 and OAS. |
||||||
| newWorkspace = await models.workspace.create({ | ||||||
| name: workspaceToImport?.name, | ||||||
| scope: 'design', | ||||||
| parentId: projectId, | ||||||
| }); | ||||||
|
|
||||||
| if (isGitProject(project)) { | ||||||
| const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
| await models.workspaceMeta.update(workspaceMeta, { | ||||||
| gitFilePath: `${newWorkspace.name}-${newWorkspace._id}.yaml`, | ||||||
| }); | ||||||
| } | ||||||
|
|
||||||
| await models.apiSpec.updateOrCreateForParentId(newWorkspace._id, { | ||||||
| contents: resourceCacheItem.content as string | undefined, | ||||||
| contentType: 'yaml', | ||||||
|
|
@@ -624,95 +622,81 @@ export const importResourcesToNewWorkspace = async ({ | |||||
| scope: workspaceToImport?.scope || 'collection', | ||||||
| parentId: projectId, | ||||||
| }); | ||||||
| } | ||||||
|
|
||||||
| if (isGitProject(project)) { | ||||||
| const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
| await models.workspaceMeta.update(workspaceMeta, { | ||||||
| gitFilePath: `${newWorkspace.name}-${newWorkspace._id}.yaml`, | ||||||
| }); | ||||||
| } | ||||||
|
|
||||||
| const apiSpec = resources.find(r => r.type === 'ApiSpec' && r.parentId === workspaceToImport?._id) as ApiSpec; | ||||||
| const hasApiSpec = newWorkspace.scope === 'design' && isApiSpec(apiSpec); | ||||||
| // if workspace is not in the resources, there will be no apiSpec, if resource type is set to api spec this could cause a bug | ||||||
| if (hasApiSpec) { | ||||||
| // TODO: will overwrite existing api spec, not needed after migrate hack is removed | ||||||
| await models.apiSpec.updateOrCreateForParentId(newWorkspace._id, { | ||||||
| contents: apiSpec.contents, | ||||||
| contentType: apiSpec.contentType, | ||||||
| fileName: workspaceToImport?.name, | ||||||
| }); | ||||||
| } | ||||||
|
|
||||||
| // If we're importing into a new workspace | ||||||
| // Map new IDs | ||||||
| ResourceIdMap.set('__WORKSPACE_ID__', newWorkspace._id); | ||||||
| workspaceToImport && ResourceIdMap.set(workspaceToImport._id, newWorkspace._id); | ||||||
| // If we're importing into a new workspace | ||||||
| // Map new IDs | ||||||
| ResourceIdMap.set('__WORKSPACE_ID__', newWorkspace._id); | ||||||
| workspaceToImport && ResourceIdMap.set(workspaceToImport._id, newWorkspace._id); | ||||||
|
|
||||||
| const resourcesWithoutWorkspaceAndApiSpec = resources.filter( | ||||||
| resource => !isWorkspace(resource) && !isApiSpec(resource), | ||||||
| ); | ||||||
| const resourcesWithoutWorkspaceAndApiSpec = resources.filter( | ||||||
| resource => !isWorkspace(resource) && !isApiSpec(resource), | ||||||
| ); | ||||||
|
|
||||||
| for (const resource of resourcesWithoutWorkspaceAndApiSpec) { | ||||||
| const model = getModel(resource.type); | ||||||
| model && ResourceIdMap.set(resource._id, generateId(model.prefix)); | ||||||
| } | ||||||
| for (const resource of resourcesWithoutWorkspaceAndApiSpec) { | ||||||
| const model = getModel(resource.type); | ||||||
| model && ResourceIdMap.set(resource._id, generateId(model.prefix)); | ||||||
| } | ||||||
|
|
||||||
| for (const resource of resourcesWithoutWorkspaceAndApiSpec) { | ||||||
| const model = getModel(resource.type); | ||||||
| for (const resource of resourcesWithoutWorkspaceAndApiSpec) { | ||||||
| const model = getModel(resource.type); | ||||||
|
|
||||||
| if (model) { | ||||||
| const newParentId = ResourceIdMap.get(resource.parentId); | ||||||
| if (!newParentId) { | ||||||
| console.warn(`Could not find new parent id for ${resource.name} ${resource._id}`); | ||||||
| continue; | ||||||
| } | ||||||
| if (isGrpcRequest(resource)) { | ||||||
| await models.grpcRequest.create({ | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| protoFileId: ResourceIdMap.get(resource.protoFileId), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } else if (isUnitTest(resource)) { | ||||||
| await models.unitTest.create({ | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| requestId: ResourceIdMap.get(resource.requestId), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } else if (isRequest(resource)) { | ||||||
| await models.request.create(importRequestWithNewIds(resource, ResourceIdMap)); | ||||||
| } else { | ||||||
| await db.docCreate(model.type, { | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } | ||||||
| if (model) { | ||||||
| const newParentId = ResourceIdMap.get(resource.parentId); | ||||||
| if (!newParentId) { | ||||||
| console.warn(`Could not find new parent id for ${resource.name} ${resource._id}`); | ||||||
| continue; | ||||||
| } | ||||||
| if (isGrpcRequest(resource)) { | ||||||
| await models.grpcRequest.create({ | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| protoFileId: ResourceIdMap.get(resource.protoFileId), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } else if (isUnitTest(resource)) { | ||||||
| await models.unitTest.create({ | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| requestId: ResourceIdMap.get(resource.requestId), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } else if (isRequest(resource)) { | ||||||
| await models.request.create(importRequestWithNewIds(resource, ResourceIdMap)); | ||||||
| } else { | ||||||
| await db.docCreate(model.type, { | ||||||
| ...resource, | ||||||
| _id: ResourceIdMap.get(resource._id), | ||||||
| parentId: newParentId, | ||||||
| }); | ||||||
| } | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| // Use the first sub environment as the active one | ||||||
| const subEnvironments = resources.filter(isEnvironment).filter(isSubEnvironmentResource) || []; | ||||||
| // Use the first sub environment as the active one | ||||||
| const subEnvironments = resources.filter(isEnvironment).filter(isSubEnvironmentResource) || []; | ||||||
|
|
||||||
| if (subEnvironments.length > 0) { | ||||||
| const firstSubEnvironment = subEnvironments[0]; | ||||||
| if (subEnvironments.length > 0) { | ||||||
| const firstSubEnvironment = subEnvironments[0]; | ||||||
|
|
||||||
| if (firstSubEnvironment) { | ||||||
| const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
| if (firstSubEnvironment) { | ||||||
| const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
|
|
||||||
| await models.workspaceMeta.update(workspaceMeta, { | ||||||
| activeEnvironmentId: ResourceIdMap.get(firstSubEnvironment._id), | ||||||
| }); | ||||||
| } | ||||||
| await models.workspaceMeta.update(workspaceMeta, { | ||||||
| activeEnvironmentId: ResourceIdMap.get(firstSubEnvironment._id), | ||||||
| }); | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| // Make sure the new workspace has required resources like base environment, cookie jar and workspaceMeta | ||||||
| await models.environment.getOrCreateForParentId(newWorkspace._id); | ||||||
| await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
| const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newWorkspace._id); | ||||||
|
|
||||||
| if (isGitProject(project)) { | ||||||
| await models.workspaceMeta.update(workspaceMeta, { | ||||||
| gitFilePath: `${newWorkspace.name}-${newWorkspace._id}.yaml`, | ||||||
|
||||||
| gitFilePath: `${newWorkspace.name}-${newWorkspace._id}.yaml`, | |
| gitFilePath: safeToUseInsomniaFileNameWithExt(`${newWorkspace.name}-${newWorkspace._id}`, 'yaml'), |
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
Moved this down since it was duplicated.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Not sure if this will be true for all YAML imports.
Maybe we can improve the error logging instead?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I'm confused why this is not a consistent convert function like the others, as there are already too many try/catch blocks here.