Created
April 8, 2019 15:14
-
-
Save langpavel/ffd6ecb5ba655b7b4bff3ebdca0942f0 to your computer and use it in GitHub Desktop.
graphql-loader-ts (really not finished)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* eslint-disable max-len, consistent-return, no-continue, no-restricted-syntax, func-names, no-throw-literal, global-require */ | |
// really heavily borrowed from 'graphql-tag/loader' | |
const fs = require('fs'); | |
const gql = require('graphql-tag/src'); | |
const { addTypenameToDocument } = require('apollo-utilities'); | |
// Takes `lines` (the raw GraphQL source split into lines) and returns
// JavaScript statements that append the definitions of any leading
// `#import "<file>"` references onto the generated `doc`.
// Scans the leading comment block of the GraphQL source for
// `#import "<file>"` lines and emits JS statements that merge the
// referenced documents' definitions (de-duplicating fragments by name)
// into `doc`. Returns the generated code as a string.
function expandImports(lines) {
  let generated = `
const names = {};
function unique(defs) {
  return defs.filter(
    function(def) {
      if (def.kind !== 'FragmentDefinition') return true;
      var name = def.name.value
      if (names[name]) {
        return false;
      } else {
        names[name] = true;
        return true;
      }
    }
  )
}
`;
  for (const line of lines) {
    const isImportComment =
      line[0] === '#' && line.slice(1).split(' ')[0] === 'import';
    if (isImportComment) {
      // The path keeps its quotes from the source, so it can be spliced
      // directly into a require() call.
      const target = line.slice(1).split(' ')[1];
      generated += `doc.definitions = doc.definitions.concat(unique(require(${target}).default.definitions));\n`;
    }
    // Imports may only appear in the leading run of comments/blank lines;
    // stop at the first line of actual GraphQL.
    if (line.length !== 0 && line[0] !== '#') break;
  }
  return generated;
}
// Builds the complete TypeScript module source that will be written next
// to the original GraphQL file.
//
// Params:
//   source       - raw GraphQL document text
//   resourcePath - path of the .graphql file (echoed into the banner)
//   tsName       - path of the generated .ts file; used to resolve the
//                  prettier config and as prettier's `filepath`
// Returns: a promise resolving to the prettier-formatted module source.
// Throws: Error when a document with multiple operations contains an
//   unnamed operation (names are needed to export each one separately).
async function genSource(source, resourcePath, tsName) {
  let doc = gql(source);
  const lines = source.split(/\r\n|\r|\n/);
  doc = addTypenameToDocument(doc);
  const headerCode = `
/* eslint-disable */
/* tslint:disable */
/*********************************** WARNING **********************************/
/* THIS IS AUTOGENERATED FILE */
/* */
/* Do not edit this file. If you need to do any changes edit GraphQL source */
/******************************************************************************/
// Generated from ${resourcePath}:
/******************************************************************************/
${lines.map(line => `// ${line}`).join('\n')}
/******************************************************************************/
import { DocumentNode } from 'graphql';
const doc: DocumentNode = ${JSON.stringify(doc)};
(doc.loc as any).source = ${JSON.stringify(doc.loc.source)};
`;
  let outputCode = '';
  // Allow multiple query/mutation definitions in a file. This parses out
  // dependencies at compile time, and then uses those at load time to create
  // minimal query documents. We cannot do the latter at compile time due to
  // how the #import code works.
  const operationCount = doc.definitions.filter(
    def => def.kind === 'OperationDefinition',
  ).length;
  if (operationCount < 1) {
    // Fragment-only document: export it whole, nothing to split.
    outputCode += `
export default doc;
`;
  } else {
    outputCode += `
// Collect any fragment/type references from a node, adding them to the refs Set
function collectFragmentReferences(node, refs) {
  if (node.kind === "FragmentSpread") {
    refs.add(node.name.value);
  } else if (node.kind === "VariableDefinition") {
    var type = node.type;
    if (type.kind === "NamedType") {
      refs.add(type.name.value);
    }
  }
  if (node.selectionSet) {
    node.selectionSet.selections.forEach(function(selection) {
      collectFragmentReferences(selection, refs);
    });
  }
  if (node.variableDefinitions) {
    node.variableDefinitions.forEach(function(def) {
      collectFragmentReferences(def, refs);
    });
  }
  if (node.definitions) {
    node.definitions.forEach(function(def) {
      collectFragmentReferences(def, refs);
    });
  }
}
var definitionRefs = {};
(function extractReferences() {
  doc.definitions.forEach(function(def) {
    if (def.name) {
      var refs = new Set();
      collectFragmentReferences(def, refs);
      definitionRefs[def.name.value] = refs;
    }
  });
})();
function findOperation(doc: DocumentNode, name: string) {
  for (var i = 0; i < doc.definitions.length; i++) {
    var element = doc.definitions[i];
    if (element.name && element.name.value == name) {
      return element;
    }
  }
}
function oneQuery(doc: DocumentNode, operationName: string): DocumentNode {
  // Copy the DocumentNode, but clear out the definitions
  const newDoc: DocumentNode = {
    kind: doc.kind,
    definitions: [findOperation(doc, operationName)]
  };
  if (doc.hasOwnProperty("loc")) {
    newDoc.loc = doc.loc;
  }
  // Now, for the operation we're running, find any fragments referenced by
  // it or the fragments it references
  var opRefs = definitionRefs[operationName] || new Set();
  var allRefs = new Set();
  var newRefs = new Set();
  // IE 11 doesn't support "new Set(iterable)", so we add the members of opRefs to newRefs one by one
  opRefs.forEach(function(refName) {
    newRefs.add(refName);
  });
  while (newRefs.size > 0) {
    var prevRefs = newRefs;
    newRefs = new Set();
    prevRefs.forEach(function(refName) {
      if (!allRefs.has(refName)) {
        allRefs.add(refName);
        var childRefs = definitionRefs[refName] || new Set();
        childRefs.forEach(function(childRef) {
          newRefs.add(childRef);
        });
      }
    });
  }
  allRefs.forEach(function(refName) {
    var op = findOperation(doc, refName);
    if (op) {
      newDoc.definitions.push(op);
    }
  });
  return newDoc;
}
export default doc;
`;
    for (const op of doc.definitions) {
      if (op.kind !== 'OperationDefinition') continue;
      if (!op.name) {
        if (operationCount > 1) {
          // FIX: throw an Error instance (was a bare string), so webpack
          // gets a stack trace and `err.message` works downstream.
          throw new Error(
            'Query/mutation names are required for a document with multiple definitions',
          );
        }
        // A single anonymous operation is fine; the default export covers it.
        continue;
      }
      const opName = op.name.value;
      outputCode += `
export const ${opName} = oneQuery(doc, "${opName}");
`;
    }
  }
  // FIX: expandImports only takes the source lines; the stray second
  // argument previously passed here was ignored.
  const importOutputCode = expandImports(lines);
  const allCode = `${headerCode}\n${importOutputCode}\n${outputCode}\n`;
  const { resolveConfig, format } = require('prettier');
  const prettierConfig = await resolveConfig(tsName, {
    editorconfig: true,
    useCache: true,
  });
  // Explicit await: prettier v3's format() is async; this also keeps a
  // rejection attributable to this frame in stack traces.
  return await format(allCode, {
    ...prettierConfig,
    filepath: tsName,
    parser: 'typescript',
  });
}
module.exports = function(source) { | |
this.cacheable(); | |
const callback = this.async(); | |
const { resourcePath } = this; | |
const tsName = `${this.resourcePath}.ts`; | |
const stringifedTsName = JSON.stringify(tsName); | |
const loaderDelegate = ` | |
// wraper for ${this.resourcePath} | |
export * from ${stringifedTsName}; | |
import doc from ${stringifedTsName}; | |
export default doc; | |
`; | |
const writeFile = async () => { | |
const allCode = await genSource(source, resourcePath, tsName); | |
fs.writeFile(tsName, allCode, err => { | |
callback(err, loaderDelegate); | |
}); | |
}; | |
fs.stat(tsName, (genErr, genStat) => { | |
if (genErr) return writeFile(); | |
fs.stat(this.resourcePath, (resourceErr, resourceStat) => { | |
if (resourceErr) return callback(resourceErr); | |
if (resourceStat.mtimeMs > genStat.mtimeMs) { | |
return writeFile(); | |
} | |
return callback(null, loaderDelegate); | |
}); | |
}); | |
}; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment