Example No. 1
const path = require(`path`)
const fs = require(`fs-extra`)

exports.onPreExtractQueries = async ({ store, getNodesByType }) => {
  const program = store.getState().program

  const CACHE_DIR = path.resolve(
    `${program.directory}/.cache/contentful/assets/`
  )
  await fs.ensureDir(CACHE_DIR)

  if (getNodesByType(`ContentfulAsset`).length === 0) {
    return
  }

  let gatsbyImageDoesNotExist = true
  try {
    require.resolve(`gatsby-image`)
    gatsbyImageDoesNotExist = false
  } catch (e) {
    // Ignore
  }

  if (gatsbyImageDoesNotExist) {
    return
  }

  // gatsby-image is installed and ContentfulAsset nodes exist, so let's
  // add our fragments to .cache/fragments.
  await fs.copy(
    require.resolve(`gatsby-source-contentful/src/fragments.js`),
    `${program.directory}/.cache/fragments/contentful-asset-fragments.js`
  )
}
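The `require.resolve` probe above is a way to test for an optional dependency without importing it. Condensed into a standalone helper, as a sketch (the helper name is ours; the fragment paths mirror the example):

const path = require(`path`)
const fs = require(`fs-extra`)

// Sketch: copy Contentful fragments only when gatsby-image is installed.
async function copyFragmentsIfGatsbyImagePresent(directory) {
  try {
    // require.resolve throws when the package can't be found.
    require.resolve(`gatsby-image`)
  } catch (e) {
    return
  }
  await fs.copy(
    require.resolve(`gatsby-source-contentful/src/fragments.js`),
    path.join(directory, `.cache`, `fragments`, `contentful-asset-fragments.js`)
  )
}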
Example No. 2
/**
 * Starts a loki database. If the file already exists, it will be
 * loaded as the database state. If not, a new database will be
 * created. If `saveFile` is omitted, an in-memory DB will be created.
 *
 * @param {string} saveFile on-disk file that the database will be
 * saved to and loaded from. If this is omitted, an in-memory database
 * will be created instead
 * @returns {Promise} promise that is resolved once the database and
 * any existing state have been loaded (if there was an existing
 * saveFile)
 */
async function start({ saveFile } = {}) {
  if (saveFile && !_.isString(saveFile)) {
    throw new Error(`saveFile must be a path`)
  }
  if (saveFile) {
    const saveDir = path.dirname(saveFile)
    await fs.ensureDir(saveDir)
    await startFileDb(saveFile)
  } else {
    await startInMemory()
  }
  ensureNodeCollections(db)
}
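A hedged usage sketch of `start`, assuming the module exports it (the `./db` path is illustrative):

const { start } = require(`./db`) // hypothetical module path

async function boot() {
  // Persist to disk; the parent directory is created if missing.
  await start({ saveFile: `/tmp/loki/db.json` })
  // Or omit saveFile for a purely in-memory database:
  // await start()
}

boot().catch(console.error)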
Example No. 3
var fs = require("fs-extra");
var async = require("async");
// `config` is app-specific and provides backupAttachmentsPath.

var backupAttachments = function (attachments, callback) {
    var backupPath = config.backupAttachmentsPath;
    fs.ensureDir(backupPath, function (err) {
        if (err) return callback(err);

        async.each(attachments, function (attachment, callback) {
            var file = backupPath + "/" + attachment.filename;
            var latestFile = backupPath + "/" + attachment.filenameLatest;
            console.log("file", file);
            console.log("latestFile", latestFile);
            if (attachment.contentType === "application/zip") {
                // Binary attachments are written as-is.
                fs.writeFile(file, attachment.contentBinary, "binary", function (err) {
                    if (err) return callback(err);
                    // Overwrite the old latestFile if it exists.
                    fs.remove(latestFile, function (err) {
                        if (err) return callback(err);
                        fs.copy(file, latestFile, callback);
                    });
                });
            } else {
                // outputFile creates parent directories as needed.
                fs.outputFile(file, attachment.content, function (err) {
                    if (err) return callback(err);
                    // Overwrite the old latestFile if it exists.
                    fs.remove(latestFile, function (err) {
                        if (err) return callback(err);
                        fs.copy(file, latestFile, callback);
                    });
                });
            }
        }, callback);
    });
};
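Both branches above finish with the same remove-then-copy rotation of `latestFile`. With fs-extra's promise API that shared tail can be factored into a helper; a minimal sketch (the helper name is ours):

const fs = require(`fs-extra`)

// Sketch: replace `latestFile` with a fresh copy of `file`.
async function rotateLatest(file, latestFile) {
  await fs.remove(latestFile) // drop the stale copy, if any
  await fs.copy(file, latestFile)
}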
Example No. 4
/**
 * processRemoteNode
 * --
 * Request the remote file and return the fileNode
 *
 * @param {CreateRemoteFileNodePayload} options
 * @return {Promise<Object>} Resolves with the fileNode
 */
async function processRemoteNode({
  url,
  store,
  cache,
  createNode,
  parentNodeId,
  auth = {},
  createNodeId,
  ext,
  name,
}) {
  // Ensure our cache directory exists.
  const pluginCacheDir = path.join(
    store.getState().program.directory,
    CACHE_DIR,
    FS_PLUGIN_DIR
  )
  await fs.ensureDir(pluginCacheDir)

  // See if there are response headers for this url
  // from a previous request.
  const cachedHeaders = await cache.get(cacheId(url))
  const headers = {}

  // Add htaccess authentication if passed in. This isn't particularly
  // extensible. We should define a proper API that we validate.
  if (auth && (auth.htaccess_pass || auth.htaccess_user)) {
    headers.auth = `${auth.htaccess_user}:${auth.htaccess_pass}`
  }

  if (cachedHeaders && cachedHeaders.etag) {
    headers[`If-None-Match`] = cachedHeaders.etag
  }

  // Create the temp and permanent file names for the url.
  const digest = createHash(url)
  if (!name) {
    name = getRemoteFileName(url)
  }
  if (!ext) {
    ext = getRemoteFileExtension(url)
  }

  const tmpFilename = createFilePath(pluginCacheDir, `tmp-${digest}`, ext)

  // Fetch the file.
  const response = await requestRemoteNode(url, headers, tmpFilename)
  // Save the response headers for future requests.
  await cache.set(cacheId(url), response.headers)

  // If the user did not provide an extension and we couldn't get one from the remote file, try to guess one
  if (ext === ``) {
    const buffer = readChunk.sync(tmpFilename, 0, fileType.minimumBytes)
    const filetype = fileType(buffer)
    if (filetype) {
      ext = `.${filetype.ext}`
    }
  }

  const filename = createFilePath(path.join(pluginCacheDir, digest), name, ext)
  // If the status code is 200, move the piped temp file to the real name.
  if (response.statusCode === 200) {
    await fs.move(tmpFilename, filename, { overwrite: true })
    // Else if 304, remove the empty response.
  } else {
    await fs.remove(tmpFilename)
  }

  // Create the file node.
  const fileNode = await createFileNode(filename, createNodeId, {})
  fileNode.internal.description = `File "${url}"`
  fileNode.parent = parentNodeId
  // Override the default plugin as gatsby-source-filesystem needs to
  // be the owner of File nodes or there'll be conflicts if any other
  // File nodes are created through normal usages of
  // gatsby-source-filesystem.
  createNode(fileNode, { name: `gatsby-source-filesystem` })

  return fileNode
}
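The etag handling in processRemoteNode is standard HTTP conditional requesting: persist `response.headers` per URL, send `If-None-Match` on the next request, and treat a 304 as "the cached file is still good." Isolated into a sketch (got's promise API assumed; `throwHttpErrors: false` keeps a 304 from rejecting):

const got = require(`got`)

// Sketch: fetch `url`, reusing a previously cached etag so an unchanged
// resource comes back as a cheap 304 instead of a full body.
async function fetchWithEtag(url, cachedHeaders) {
  const headers = {}
  if (cachedHeaders && cachedHeaders.etag) {
    headers[`If-None-Match`] = cachedHeaders.etag
  }
  const response = await got(url, { headers, throwHttpErrors: false })
  return {
    changed: response.statusCode === 200,
    headers: response.headers, // cache these for the next call
    body: response.statusCode === 200 ? response.body : null,
  }
}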
Example No. 5
async function sqipSharp({ type, cache, getNodeAndSavePathDependency, store }) {
  const program = store.getState().program
  const cacheDir = resolve(`${program.directory}/.cache/sqip/`)

  await ensureDir(cacheDir)

  return {
    sqip: {
      type: new GraphQLObjectType({
        name: `Sqip`,
        fields: {
          svg: { type: GraphQLString },
          dataURI: { type: GraphQLString },
        },
      }),
      args: {
        blur: { type: GraphQLInt, defaultValue: 1 },
        numberOfPrimitives: { type: GraphQLInt, defaultValue: 10 },
        mode: { type: GraphQLInt, defaultValue: 0 },
        width: {
          type: GraphQLInt,
          defaultValue: 256,
        },
        height: {
          type: GraphQLInt,
        },
        grayscale: {
          type: GraphQLBoolean,
          defaultValue: false,
        },
        duotone: {
          type: DuotoneGradientType,
          defaultValue: false,
        },
        cropFocus: {
          type: ImageCropFocusType,
          defaultValue: sharp.strategy.attention,
        },
        rotate: {
          type: GraphQLInt,
          defaultValue: 0,
        },
      },
      async resolve(image, fieldArgs, context) {
        const {
          blur,
          numberOfPrimitives,
          mode,
          width,
          height,
          grayscale,
          duotone,
          cropFocus,
          rotate,
        } = fieldArgs

        const sharpArgs = {
          width,
          height,
          grayscale,
          duotone,
          cropFocus,
          rotate,
        }

        const file = getNodeAndSavePathDependency(image.parent, context.path)

        const job = await queueImageResizing({ file, args: sharpArgs })

        if (!(await fs.exists(job.absolutePath))) {
          debug(`Preparing ${file.name}`)
          await job.finishedPromise
        }

        const { absolutePath } = job

        return generateSqip({
          cache,
          cacheDir,
          absolutePath,
          numberOfPrimitives,
          blur,
          mode,
        })
      },
    },
  }
}
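Once registered this way, the field is queried like any other; a hedged example for a page component (the `imageSharp` root field is an assumption, as exposed by gatsby-transformer-sharp):

import { graphql } from "gatsby"

// Sketch: request a 12-primitive SQIP preview as SVG plus a data URI.
export const query = graphql`
  {
    imageSharp {
      sqip(numberOfPrimitives: 12, blur: 0) {
        svg
        dataURI
      }
    }
  }
`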
Example No. 6
async function sqipContentful({ type, cache, store }) {
  const {
    schemes: { ImageResizingBehavior, ImageCropFocusType },
  } = require(`gatsby-source-contentful`)

  const cacheImage = require(`gatsby-source-contentful/cache-image`)

  const program = store.getState().program
  const cacheDir = resolve(`${program.directory}/.cache/sqip/`)

  await ensureDir(cacheDir)

  return {
    sqip: {
      type: new GraphQLObjectType({
        name: `Sqip`,
        fields: {
          svg: { type: GraphQLString },
          dataURI: { type: GraphQLString },
        },
      }),
      args: {
        blur: {
          type: GraphQLInt,
          defaultValue: 1,
        },
        numberOfPrimitives: {
          type: GraphQLInt,
          defaultValue: 10,
        },
        mode: {
          type: GraphQLInt,
          defaultValue: 0,
        },
        width: {
          type: GraphQLInt,
          defaultValue: 256,
        },
        height: {
          type: GraphQLInt,
        },
        resizingBehavior: {
          type: ImageResizingBehavior,
        },
        cropFocus: {
          type: ImageCropFocusType,
          defaultValue: null,
        },
        background: {
          type: GraphQLString,
          defaultValue: null,
        },
      },
      async resolve(asset, fieldArgs, context) {
        const {
          file: { contentType },
        } = asset

        if (contentType.indexOf(`image/`) !== 0) {
          return null
        }

        const {
          blur,
          numberOfPrimitives,
          mode,
          resizingBehavior,
          cropFocus,
          background,
        } = fieldArgs

        let { width, height } = fieldArgs

        if (width && height) {
          // Clamp to a 256px-wide preview while preserving the aspect ratio.
          const aspectRatio = height / width
          width = 256
          height = Math.round(width * aspectRatio)
        }

        const options = {
          width: 256,
          height,
          resizingBehavior,
          cropFocus,
          background,
        }

        const absolutePath = await cacheImage(store, asset, options)

        return generateSqip({
          cache,
          cacheDir,
          absolutePath,
          numberOfPrimitives,
          blur,
          mode,
        })
      },
    },
  }
}
Example No. 7
module.exports = async (args: BootstrapArgs) => {
  const spanArgs = args.parentSpan ? { childOf: args.parentSpan } : {}
  const bootstrapSpan = tracer.startSpan(`bootstrap`, spanArgs)

  const program = {
    ...args,
    // Fix program directory path for windows env.
    directory: slash(args.directory),
  }

  store.dispatch({
    type: `SET_PROGRAM`,
    payload: program,
  })

  // Try opening the site's gatsby-config.js file.
  let activity = report.activityTimer(`open and validate gatsby-config`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const config = await preferDefault(
    getConfigFile(program.directory, `gatsby-config`)
  )

  if (config && config.polyfill) {
    report.warn(
      `Support for custom Promise polyfills has been removed in Gatsby v2. We only support Babel 7's new automatic polyfilling behavior.`
    )
  }

  store.dispatch({
    type: `SET_SITE_CONFIG`,
    payload: config,
  })

  activity.end()

  activity = report.activityTimer(`load plugins`)
  activity.start()
  const flattenedPlugins = await loadPlugins(config)
  activity.end()

  // onPreInit
  activity = report.activityTimer(`onPreInit`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`onPreInit`, { parentSpan: activity.span })
  activity.end()

  // Delete html and css files from the public directory as we don't want
  // deleted pages and styles from previous builds to stick around.
  activity = report.activityTimer(
    `delete html and css files from previous builds`,
    {
      parentSpan: bootstrapSpan,
    }
  )
  activity.start()
  await del([
    `public/*.{html,css}`,
    `public/**/*.{html,css}`,
    `!public/static`,
    `!public/static/**/*.{html,css}`,
  ])
  activity.end()

  activity = report.activityTimer(`initialize cache`)
  activity.start()
  // Check if any plugins have been updated since our last run. If so,
  // we delete the cache, as there have likely been changes
  // since the previous run.
  //
  // We do this by creating a hash of all the version numbers of installed
  // plugins, the site's package.json, gatsby-config.js, and gatsby-node.js.
  // The last, gatsby-node.js, is important as many gatsby sites put important
  // logic in there e.g. generating slugs for custom pages.
  const pluginVersions = flattenedPlugins.map(p => p.version)
  const hashes = await Promise.all([
    md5File(`package.json`),
    Promise.resolve(
      md5File(`${program.directory}/gatsby-config.js`).catch(() => {})
    ), // ignore as this file isn't required
    Promise.resolve(
      md5File(`${program.directory}/gatsby-node.js`).catch(() => {})
    ), // ignore as this file isn't required
  ])
  const pluginsHash = crypto
    .createHash(`md5`)
    .update(JSON.stringify(pluginVersions.concat(hashes)))
    .digest(`hex`)
  let state = store.getState()
  const oldPluginsHash = state && state.status ? state.status.PLUGINS_HASH : ``

  // Check if anything has changed. If it has, delete the site's .cache
  // directory and tell reducers to empty themselves.
  //
  // Also if the hash isn't there, then delete things just in case something
  // is weird.
  if (oldPluginsHash && pluginsHash !== oldPluginsHash) {
    report.info(report.stripIndent`
      One or more of your plugins have changed since the last time you ran Gatsby. As
      a precaution, we're deleting your site's cache to ensure there's not any stale
      data
    `)
  }

  if (!oldPluginsHash || pluginsHash !== oldPluginsHash) {
    try {
      await fs.remove(`${program.directory}/.cache`)
    } catch (e) {
      report.error(`Failed to remove .cache files.`, e)
    }
    // Tell reducers to delete their data (the store will already have
    // been loaded from the file system cache).
    store.dispatch({
      type: `DELETE_CACHE`,
    })
  }

  // Update the store with the new plugins hash.
  store.dispatch({
    type: `UPDATE_PLUGINS_HASH`,
    payload: pluginsHash,
  })

  // Now that we know the .cache directory is safe, initialize the cache
  // directory.
  initCache()

  // Ensure the public/static directory exists.
  await fs.ensureDir(`${program.directory}/public/static`)

  activity.end()

  // Copy our site files to the root of the site.
  activity = report.activityTimer(`copy gatsby files`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const srcDir = `${__dirname}/../../cache-dir`
  const siteDir = `${program.directory}/.cache`
  const tryRequire = `${__dirname}/../utils/test-require-error.js`
  try {
    await fs.copy(srcDir, siteDir, {
      clobber: true,
    })
    await fs.copy(tryRequire, `${siteDir}/test-require-error.js`, {
      clobber: true,
    })
    await fs.ensureDir(`${program.directory}/.cache/json`)

    // Ensure .cache/fragments exists and is empty. We want fragments to be
    // added on every run in response to data as fragments can only be added if
    // the data used to create the schema they're dependent on is available.
    await fs.emptyDir(`${program.directory}/.cache/fragments`)
  } catch (err) {
    report.panic(`Unable to copy site files to .cache`, err)
  }

  // Find plugins which implement gatsby-browser and gatsby-ssr and write
  // out api-runners for them.
  const hasAPIFile = (env, plugin) => {
    // The plugin loader has disabled SSR APIs for this plugin. Usually due to
    // multiple implementations of an API that can only be implemented once
    if (env === `ssr` && plugin.skipSSR === true) return undefined

    const envAPIs = plugin[`${env}APIs`]

    // Always include the site's gatsby-browser.js if it exists as it's
    // a handy place to include global styles and other global imports.
    try {
      if (env === `browser` && plugin.name === `default-site-plugin`) {
        return slash(
          require.resolve(path.join(plugin.resolve, `gatsby-${env}`))
        )
      }
    } catch (e) {
      // ignore
    }

    if (envAPIs && Array.isArray(envAPIs) && envAPIs.length > 0) {
      return slash(path.join(plugin.resolve, `gatsby-${env}`))
    }
    return undefined
  }

  const ssrPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`ssr`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )

  const browserPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`browser`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )

  const browserPluginsRequires = browserPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)

  const browserAPIRunner = `module.exports = [${browserPluginsRequires}]\n`

  let sSRAPIRunner = ``

  try {
    sSRAPIRunner = fs.readFileSync(`${siteDir}/api-runner-ssr.js`, `utf-8`)
  } catch (err) {
    report.panic(`Failed to read ${siteDir}/api-runner-ssr.js`, err)
  }

  const ssrPluginsRequires = ssrPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)
  sSRAPIRunner = `var plugins = [${ssrPluginsRequires}]\n${sSRAPIRunner}`

  fs.writeFileSync(
    `${siteDir}/api-runner-browser-plugins.js`,
    browserAPIRunner,
    `utf-8`
  )
  fs.writeFileSync(`${siteDir}/api-runner-ssr.js`, sSRAPIRunner, `utf-8`)

  activity.end()
  /**
   * Start the main bootstrap processes.
   */

  // onPreBootstrap
  activity = report.activityTimer(`onPreBootstrap`)
  activity.start()
  await apiRunnerNode(`onPreBootstrap`)
  activity.end()

  // Source nodes
  activity = report.activityTimer(`source and transform nodes`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../utils/source-nodes`)({ parentSpan: activity.span })
  activity.end()

  // Create Schema.
  activity = report.activityTimer(`building schema`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../schema`)({ parentSpan: activity.span })
  activity.end()

  // Collect resolvable extensions and attach to program.
  const extensions = [`.mjs`, `.js`, `.jsx`, `.wasm`, `.json`]
  // Change to this being an action and plugins implement `onPreBootstrap`
  // for adding extensions.
  const apiResults = await apiRunnerNode(`resolvableExtensions`, {
    traceId: `initial-resolvableExtensions`,
    parentSpan: bootstrapSpan,
  })

  store.dispatch({
    type: `SET_PROGRAM_EXTENSIONS`,
    payload: _.flattenDeep([extensions, apiResults]),
  })

  const graphqlRunner = (query, context = {}) => {
    const schema = store.getState().schema
    return graphql(schema, query, context, context, context)
  }

  // Collect pages.
  activity = report.activityTimer(`createPages`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`createPages`, {
    graphql: graphqlRunner,
    traceId: `initial-createPages`,
    waitForCascadingActions: true,
    parentSpan: activity.span,
  })
  activity.end()

  // A variant on createPages for plugins that want to
  // have full control over adding/removing pages. The normal
  // "createPages" API is called every time (during development)
  // that data changes.
  activity = report.activityTimer(`createPagesStatefully`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`createPagesStatefully`, {
    graphql: graphqlRunner,
    traceId: `initial-createPagesStatefully`,
    waitForCascadingActions: true,
    parentSpan: activity.span,
  })
  activity.end()

  activity = report.activityTimer(`onPreExtractQueries`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`onPreExtractQueries`, { parentSpan: activity.span })
  activity.end()

  // Update Schema for SitePage.
  activity = report.activityTimer(`update schema`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../schema`)({ parentSpan: activity.span })
  activity.end()

  require(`../schema/type-conflict-reporter`).printConflicts()

  // Extract queries
  activity = report.activityTimer(`extract queries from components`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await extractQueries()
  activity.end()

  // Start the createPages hot reloader.
  if (process.env.NODE_ENV !== `production`) {
    require(`./page-hot-reloader`)(graphqlRunner)
  }

  // Run queries
  activity = report.activityTimer(`run graphql queries`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const startQueries = process.hrtime()
  queryQueue.on(`task_finish`, () => {
    const stats = queryQueue.getStats()
    activity.setStatus(
      `${stats.total}/${stats.peak} ${(
        stats.total / convertHrtime(process.hrtime(startQueries)).seconds
      ).toFixed(2)} queries/second`
    )
  })
  await runInitialQueries(activity)
  activity.end()

  // Write out files.
  activity = report.activityTimer(`write out page data`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  try {
    await writePages()
  } catch (err) {
    report.panic(`Failed to write out page data`, err)
  }
  activity.end()

  // Write out redirects.
  activity = report.activityTimer(`write out redirect data`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await writeRedirects()
  activity.end()

  const checkJobsDone = _.debounce(resolve => {
    const state = store.getState()
    if (state.jobs.active.length === 0) {
      report.log(``)
      report.info(`bootstrap finished - ${process.uptime()} s`)
      report.log(``)

      // onPostBootstrap
      activity = report.activityTimer(`onPostBootstrap`, {
        parentSpan: bootstrapSpan,
      })
      activity.start()
      apiRunnerNode(`onPostBootstrap`, { parentSpan: activity.span }).then(
        () => {
          activity.end()
          bootstrapSpan.finish()
          resolve({ graphqlRunner })
        }
      )
    }
  }, 100)

  if (store.getState().jobs.active.length === 0) {
    // onPostBootstrap
    activity = report.activityTimer(`onPostBootstrap`, {
      parentSpan: bootstrapSpan,
    })
    activity.start()
    await apiRunnerNode(`onPostBootstrap`, { parentSpan: activity.span })
    activity.end()

    bootstrapSpan.finish()

    report.log(``)
    report.info(`bootstrap finished - ${process.uptime()} s`)
    report.log(``)
    emitter.emit(`BOOTSTRAP_FINISHED`)
    return {
      graphqlRunner,
    }
  } else {
    return new Promise(resolve => {
      // Wait until all side effect jobs are finished.
      emitter.on(`END_JOB`, () => checkJobsDone(resolve))
    })
  }
}
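The cache-invalidation step near the top of bootstrap reduces to "hash everything that could change the build, compare with the previous run." Isolated into a sketch (md5-file's promise entry point assumed, as in the original; the function name is ours):

const crypto = require(`crypto`)
const md5File = require(`md5-file/promise`)

// Sketch: derive one hash from plugin versions plus the key site files so
// that any change to them invalidates the cache on the next run.
async function computePluginsHash(directory, pluginVersions) {
  const fileHashes = await Promise.all(
    [`package.json`, `gatsby-config.js`, `gatsby-node.js`].map(file =>
      md5File(`${directory}/${file}`).catch(() => ``) // files are optional
    )
  )
  return crypto
    .createHash(`md5`)
    .update(JSON.stringify(pluginVersions.concat(fileHashes)))
    .digest(`hex`)
}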
Example No. 8
  new Promise(async (resolve, reject) => {
    if (!url || isWebUri(url) === undefined) {
      resolve()
      return
    }

    // Ensure our cache directory exists.
    await fs.ensureDir(
      path.join(
        store.getState().program.directory,
        `.cache`,
        `gatsby-source-filesystem`
      )
    )

    // See if there's response headers for this url
    // from a previous request.
    const cachedHeaders = await cache.get(cacheId(url))
    const headers = {}

    // Add htaccess authentication if passed in. This isn't particularly
    // extensible. We should define a proper API that we validate.
    if (auth && auth.htaccess_pass && auth.htaccess_user) {
      headers.auth = `${auth.htaccess_user}:${auth.htaccess_pass}`
    }

    if (cachedHeaders && cachedHeaders.etag) {
      headers[`If-None-Match`] = cachedHeaders.etag
    }

    // Create the temp and permanent file names for the url.
    const digest = crypto
      .createHash(`md5`)
      .update(url)
      .digest(`hex`)
    const tmpFilename = path.join(
      store.getState().program.directory,
      `.cache`,
      `gatsby-source-filesystem`,
      `tmp-` + digest + path.parse(url).ext
    )
    const filename = path.join(
      store.getState().program.directory,
      `.cache`,
      `gatsby-source-filesystem`,
      digest + path.parse(url).ext
    )

    // Fetch the file.
    let statusCode
    let responseHeaders
    let responseError = false
    const responseStream = got.stream(url, headers)
    responseStream.pipe(fs.createWriteStream(tmpFilename))
    responseStream.on(`downloadProgress`, pro => console.log(pro))

    // If there's a 400/500 response or other error.
    responseStream.on(`error`, (error, body, response) => {
      responseError = true
      fs.removeSync(tmpFilename)
      reject(error, body, response)
    })

    // Record the status code and headers so the `end` handler below can
    // decide what to do with the temp file.
    responseStream.on(`response`, response => {
      statusCode = response.statusCode
      responseHeaders = response.headers
    })

    responseStream.on(`end`, response => {
      if (responseError) return

      // Save the response headers for future requests.
      cache.set(cacheId(url), responseHeaders)
      // If the status code is 200, move the piped temp file to the real name.
      if (statusCode === 200) {
        fs.moveSync(tmpFilename, filename, { overwrite: true })
      } else {
        // Else (e.g. 304 Not Modified), remove the empty response.
        fs.removeSync(tmpFilename)
      }

      // Create the file node and return.
      createFileNode(filename, {}).then(fileNode => {
        // Override the default plugin as gatsby-source-filesystem needs to
        // be the owner of File nodes or there'll be conflicts if any other
        // File nodes are created through normal usages of
        // gatsby-source-filesystem.
        createNode(fileNode, { name: `gatsby-source-filesystem` })
        resolve(fileNode)
      })
    })
  })
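The stream handling above is the temp-file-then-promote pattern: pipe the response into a `tmp-` file and only move it over the permanent name on a 200, so a failed or 304 response never clobbers a good copy. A compact sketch of that core (got's stream error semantics for non-2xx responses vary by version; handled here as in the original):

const fs = require(`fs-extra`)
const got = require(`got`)

// Sketch: download `url` into `tmpFile`, promoting it to `destFile` only
// when the server sent a fresh body (HTTP 200).
function downloadAtomically(url, tmpFile, destFile) {
  return new Promise((resolve, reject) => {
    let statusCode
    const stream = got.stream(url)
    stream.on(`response`, res => (statusCode = res.statusCode))
    stream.on(`error`, err => {
      fs.removeSync(tmpFile) // discard the partial download
      reject(err)
    })
    const writer = fs.createWriteStream(tmpFile)
    writer.on(`finish`, () => {
      if (statusCode === 200) {
        fs.moveSync(tmpFile, destFile, { overwrite: true })
      } else {
        fs.removeSync(tmpFile) // e.g. 304 Not Modified: keep the old file
      }
      resolve(destFile)
    })
    stream.pipe(writer)
  })
}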