Exemplo n.º 1
0
// Compute the hex-encoded MD5 digest of the given string.
const digest = str => {
  const hash = crypto.createHash(`md5`)
  hash.update(str)
  return hash.digest(`hex`)
}
Exemplo n.º 2
0
module.exports = async (args: BootstrapArgs) => {
  const spanArgs = args.parentSpan ? { childOf: args.parentSpan } : {}
  const bootstrapSpan = tracer.startSpan(`bootstrap`, spanArgs)

  const program = {
    ...args,
    // Fix program directory path for windows env.
    directory: slash(args.directory),
  }

  store.dispatch({
    type: `SET_PROGRAM`,
    payload: program,
  })

  // Try opening the site's gatsby-config.js file.
  let activity = report.activityTimer(`open and validate gatsby-config`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const config = await preferDefault(
    getConfigFile(program.directory, `gatsby-config`)
  )

  if (config && config.polyfill) {
    report.warn(
      `Support for custom Promise polyfills has been removed in Gatsby v2. We only support Babel 7's new automatic polyfilling behavior.`
    )
  }

  store.dispatch({
    type: `SET_SITE_CONFIG`,
    payload: config,
  })

  activity.end()

  activity = report.activityTimer(`load plugins`)
  activity.start()
  const flattenedPlugins = await loadPlugins(config)
  activity.end()

  // onPreInit
  activity = report.activityTimer(`onPreInit`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`onPreInit`, { parentSpan: activity.span })
  activity.end()

  // Delete html and css files from the public directory as we don't want
  // deleted pages and styles from previous builds to stick around.
  activity = report.activityTimer(
    `delete html and css files from previous builds`,
    {
      parentSpan: bootstrapSpan,
    }
  )
  activity.start()
  await del([
    `public/*.{html,css}`,
    `public/**/*.{html,css}`,
    `!public/static`,
    `!public/static/**/*.{html,css}`,
  ])
  activity.end()

  activity = report.activityTimer(`initialize cache`)
  activity.start()
  // Check if any plugins have been updated since our last run. If so
  // we delete the cache is there's likely been changes
  // since the previous run.
  //
  // We do this by creating a hash of all the version numbers of installed
  // plugins, the site's package.json, gatsby-config.js, and gatsby-node.js.
  // The last, gatsby-node.js, is important as many gatsby sites put important
  // logic in there e.g. generating slugs for custom pages.
  const pluginVersions = flattenedPlugins.map(p => p.version)
  const hashes = await Promise.all([
    md5File(`package.json`),
    Promise.resolve(
      md5File(`${program.directory}/gatsby-config.js`).catch(() => {})
    ), // ignore as this file isn't required),
    Promise.resolve(
      md5File(`${program.directory}/gatsby-node.js`).catch(() => {})
    ), // ignore as this file isn't required),
  ])
  const pluginsHash = crypto
    .createHash(`md5`)
    .update(JSON.stringify(pluginVersions.concat(hashes)))
    .digest(`hex`)
  let state = store.getState()
  const oldPluginsHash = state && state.status ? state.status.PLUGINS_HASH : ``

  // Check if anything has changed. If it has, delete the site's .cache
  // directory and tell reducers to empty themselves.
  //
  // Also if the hash isn't there, then delete things just in case something
  // is weird.
  if (oldPluginsHash && pluginsHash !== oldPluginsHash) {
    report.info(report.stripIndent`
      One or more of your plugins have changed since the last time you ran Gatsby. As
      a precaution, we're deleting your site's cache to ensure there's not any stale
      data
    `)
  }

  if (!oldPluginsHash || pluginsHash !== oldPluginsHash) {
    try {
      await fs.remove(`${program.directory}/.cache`)
    } catch (e) {
      report.error(`Failed to remove .cache files.`, e)
    }
    // Tell reducers to delete their data (the store will already have
    // been loaded from the file system cache).
    store.dispatch({
      type: `DELETE_CACHE`,
    })
  }

  // Update the store with the new plugins hash.
  store.dispatch({
    type: `UPDATE_PLUGINS_HASH`,
    payload: pluginsHash,
  })

  // Now that we know the .cache directory is safe, initialize the cache
  // directory.
  initCache()

  // Ensure the public/static directory
  await fs.ensureDir(`${program.directory}/public/static`)

  activity.end()

  // Copy our site files to the root of the site.
  activity = report.activityTimer(`copy gatsby files`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const srcDir = `${__dirname}/../../cache-dir`
  const siteDir = `${program.directory}/.cache`
  const tryRequire = `${__dirname}/../utils/test-require-error.js`
  try {
    await fs.copy(srcDir, siteDir, {
      clobber: true,
    })
    await fs.copy(tryRequire, `${siteDir}/test-require-error.js`, {
      clobber: true,
    })
    await fs.ensureDirSync(`${program.directory}/.cache/json`)

    // Ensure .cache/fragments exists and is empty. We want fragments to be
    // added on every run in response to data as fragments can only be added if
    // the data used to create the schema they're dependent on is available.
    await fs.emptyDir(`${program.directory}/.cache/fragments`)
  } catch (err) {
    report.panic(`Unable to copy site files to .cache`, err)
  }

  // Find plugins which implement gatsby-browser and gatsby-ssr and write
  // out api-runners for them.
  const hasAPIFile = (env, plugin) => {
    // The plugin loader has disabled SSR APIs for this plugin. Usually due to
    // multiple implementations of an API that can only be implemented once
    if (env === `ssr` && plugin.skipSSR === true) return undefined

    const envAPIs = plugin[`${env}APIs`]

    // Always include the site's gatsby-browser.js if it exists as it's
    // a handy place to include global styles and other global imports.
    try {
      if (env === `browser` && plugin.name === `default-site-plugin`) {
        return slash(
          require.resolve(path.join(plugin.resolve, `gatsby-${env}`))
        )
      }
    } catch (e) {
      // ignore
    }

    if (envAPIs && Array.isArray(envAPIs) && envAPIs.length > 0) {
      return slash(path.join(plugin.resolve, `gatsby-${env}`))
    }
    return undefined
  }

  const ssrPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`ssr`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )

  const browserPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`browser`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )

  const browserPluginsRequires = browserPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)

  const browserAPIRunner = `module.exports = [${browserPluginsRequires}]\n`

  let sSRAPIRunner = ``

  try {
    sSRAPIRunner = fs.readFileSync(`${siteDir}/api-runner-ssr.js`, `utf-8`)
  } catch (err) {
    report.panic(`Failed to read ${siteDir}/api-runner-ssr.js`, err)
  }

  const ssrPluginsRequires = ssrPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)
  sSRAPIRunner = `var plugins = [${ssrPluginsRequires}]\n${sSRAPIRunner}`

  fs.writeFileSync(
    `${siteDir}/api-runner-browser-plugins.js`,
    browserAPIRunner,
    `utf-8`
  )
  fs.writeFileSync(`${siteDir}/api-runner-ssr.js`, sSRAPIRunner, `utf-8`)

  activity.end()
  /**
   * Start the main bootstrap processes.
   */

  // onPreBootstrap
  activity = report.activityTimer(`onPreBootstrap`)
  activity.start()
  await apiRunnerNode(`onPreBootstrap`)
  activity.end()

  // Source nodes
  activity = report.activityTimer(`source and transform nodes`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../utils/source-nodes`)({ parentSpan: activity.span })
  activity.end()

  // Create Schema.
  activity = report.activityTimer(`building schema`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../schema`)({ parentSpan: activity.span })
  activity.end()

  // Collect resolvable extensions and attach to program.
  const extensions = [`.mjs`, `.js`, `.jsx`, `.wasm`, `.json`]
  // Change to this being an action and plugins implement `onPreBootstrap`
  // for adding extensions.
  const apiResults = await apiRunnerNode(`resolvableExtensions`, {
    traceId: `initial-resolvableExtensions`,
    parentSpan: bootstrapSpan,
  })

  store.dispatch({
    type: `SET_PROGRAM_EXTENSIONS`,
    payload: _.flattenDeep([extensions, apiResults]),
  })

  const graphqlRunner = (query, context = {}) => {
    const schema = store.getState().schema
    return graphql(schema, query, context, context, context)
  }

  // Collect pages.
  activity = report.activityTimer(`createPages`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`createPages`, {
    graphql: graphqlRunner,
    traceId: `initial-createPages`,
    waitForCascadingActions: true,
    parentSpan: activity.span,
  })
  activity.end()

  // A variant on createPages for plugins that want to
  // have full control over adding/removing pages. The normal
  // "createPages" API is called every time (during development)
  // that data changes.
  activity = report.activityTimer(`createPagesStatefully`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`createPagesStatefully`, {
    graphql: graphqlRunner,
    traceId: `initial-createPagesStatefully`,
    waitForCascadingActions: true,
    parentSpan: activity.span,
  })
  activity.end()

  activity = report.activityTimer(`onPreExtractQueries`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await apiRunnerNode(`onPreExtractQueries`, { parentSpan: activity.span })
  activity.end()

  // Update Schema for SitePage.
  activity = report.activityTimer(`update schema`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await require(`../schema`)({ parentSpan: activity.span })
  activity.end()

  require(`../schema/type-conflict-reporter`).printConflicts()

  // Extract queries
  activity = report.activityTimer(`extract queries from components`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await extractQueries()
  activity.end()

  // Start the createPages hot reloader.
  if (process.env.NODE_ENV !== `production`) {
    require(`./page-hot-reloader`)(graphqlRunner)
  }

  // Run queries
  activity = report.activityTimer(`run graphql queries`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  const startQueries = process.hrtime()
  queryQueue.on(`task_finish`, () => {
    const stats = queryQueue.getStats()
    activity.setStatus(
      `${stats.total}/${stats.peak} ${(
        stats.total / convertHrtime(process.hrtime(startQueries)).seconds
      ).toFixed(2)} queries/second`
    )
  })
  await runInitialQueries(activity)
  activity.end()

  // Write out files.
  activity = report.activityTimer(`write out page data`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  try {
    await writePages()
  } catch (err) {
    report.panic(`Failed to write out page data`, err)
  }
  activity.end()

  // Write out redirects.
  activity = report.activityTimer(`write out redirect data`, {
    parentSpan: bootstrapSpan,
  })
  activity.start()
  await writeRedirects()
  activity.end()

  const checkJobsDone = _.debounce(resolve => {
    const state = store.getState()
    if (state.jobs.active.length === 0) {
      report.log(``)
      report.info(`bootstrap finished - ${process.uptime()} s`)
      report.log(``)

      // onPostBootstrap
      activity = report.activityTimer(`onPostBootstrap`, {
        parentSpan: bootstrapSpan,
      })
      activity.start()
      apiRunnerNode(`onPostBootstrap`, { parentSpan: activity.span }).then(
        () => {
          activity.end()
          bootstrapSpan.finish()
          resolve({ graphqlRunner })
        }
      )
    }
  }, 100)

  if (store.getState().jobs.active.length === 0) {
    // onPostBootstrap
    activity = report.activityTimer(`onPostBootstrap`, {
      parentSpan: bootstrapSpan,
    })
    activity.start()
    await apiRunnerNode(`onPostBootstrap`, { parentSpan: activity.span })
    activity.end()

    bootstrapSpan.finish()

    report.log(``)
    report.info(`bootstrap finished - ${process.uptime()} s`)
    report.log(``)
    emitter.emit(`BOOTSTRAP_FINISHED`)
    return {
      graphqlRunner,
    }
  } else {
    return new Promise(resolve => {
      // Wait until all side effect jobs are finished.
      emitter.on(`END_JOB`, () => checkJobsDone(resolve))
    })
  }
}
Exemplo n.º 3
0
  // Download `url` into the site's .cache/gatsby-source-filesystem directory
  // (filename keyed by md5(url)), reusing cached response headers for
  // conditional requests, then create a File node for the downloaded file.
  // NOTE(review): async promise executor — an exception thrown inside the
  // executor rejects nothing by itself; confirm all failure paths call
  // reject explicitly.
  new Promise(async (resolve, reject) => {
    // No URL (or not a valid web URI) — resolve with undefined, nothing to do.
    if (!url || isWebUri(url) === undefined) {
      resolve()
      return
    }

    // Ensure our cache directory exists.
    await fs.ensureDir(
      path.join(
        store.getState().program.directory,
        `.cache`,
        `gatsby-source-filesystem`
      )
    )

    // See if there's response headers for this url
    // from a previous request.
    const cachedHeaders = await cache.get(cacheId(url))
    const headers = {}

    // Add htaccess authentication if passed in. This isn't particularly
    // extensible. We should define a proper API that we validate.
    if (auth && auth.htaccess_pass && auth.htaccess_user) {
      headers.auth = `${auth.htaccess_user}:${auth.htaccess_pass}`
    }

    // Send the previously-seen ETag so the server can answer 304 Not Modified.
    if (cachedHeaders && cachedHeaders.etag) {
      headers[`If-None-Match`] = cachedHeaders.etag
    }

    // Create the temp and permanent file names for the url.
    const digest = crypto
      .createHash(`md5`)
      .update(url)
      .digest(`hex`)
    const tmpFilename = path.join(
      store.getState().program.directory,
      `.cache`,
      `gatsby-source-filesystem`,
      `tmp-` + digest + path.parse(url).ext
    )
    const filename = path.join(
      store.getState().program.directory,
      `.cache`,
      `gatsby-source-filesystem`,
      digest + path.parse(url).ext
    )

    // Fetch the file.
    let statusCode
    let responseHeaders
    let responseError = false
    const responseStream = got.stream(url, headers)
    responseStream.pipe(fs.createWriteStream(tmpFilename))
    // NOTE(review): logs every progress event — presumably debug output;
    // consider removing or gating before release.
    responseStream.on(`downloadProgress`, pro => console.log(pro))

    // If there's a 400/500 response or other error.
    responseStream.on(`error`, (error, body, response) => {
      responseError = true
      fs.removeSync(tmpFilename)
      // NOTE(review): Promise reject forwards only its first argument;
      // `body` and `response` are dropped by the Promise machinery.
      reject(error, body, response)
    })

    // If the status code is 200, move the piped temp file to the real name.
    // Else if 304, remove the empty response.
    responseStream.on(`response`, response => {
      statusCode = response.statusCode
      responseHeaders = response.headers
    })

    responseStream.on(`end`, response => {
      // The error handler above already cleaned up and rejected.
      if (responseError) return

      // Save the response headers for future requests.
      cache.set(cacheId(url), responseHeaders)
      if (statusCode === 200) {
        fs.moveSync(tmpFilename, filename, { overwrite: true })
      } else {
        // e.g. 304 Not Modified — the previously-downloaded file is current.
        fs.removeSync(tmpFilename)
      }

      // Create the file node and return.
      createFileNode(filename, {}).then(fileNode => {
        // Override the default plugin as gatsby-source-filesystem needs to
        // be the owner of File nodes or there'll be conflicts if any other
        // File nodes are created through normal usages of
        // gatsby-source-filesystem.
        createNode(fileNode, { name: `gatsby-source-filesystem` })
        resolve(fileNode)
      })
    })
  })
Exemplo n.º 4
0
exports.run = async (browser, url, width, height) => {
  console.log(`Invoked: ${url} (${width}x${height})`)

  if (!process.env.S3_BUCKET) {
    throw new Error(
      `Provide the S3 bucket to use by adding an S3_BUCKET` +
        ` environment variable to this Lambda's configuration`
    )
  }

  const region = await s3GetBucketLocation(process.env.S3_BUCKET)

  if (!region) {
    throw new Error(`invalid bucket ${process.env.S3_BUCKET}`)
  }

  const keyBase = `${url}-(${width},${height})`
  const digest = crypto
    .createHash(`md5`)
    .update(keyBase)
    .digest(`hex`)
  const key = `${digest}.png`

  const screenshotUrl = `https://s3-${region}.amazonaws.com/${
    process.env.S3_BUCKET
  }/${key}`

  const metadata = await s3HeadObject(key)

  const now = new Date()
  if (metadata) {
    if (metadata.Expiration) {
      const expires = getDateFromExpiration(metadata.Expiration)
      if (now < expires) {
        console.log(`Returning cached screenshot`)
        return { url: screenshotUrl, expires }
      }
    } else {
      throw new Error(`no expiration date set`)
    }
  }

  console.log(`Taking new screenshot`)

  const page = await browser.newPage()

  await page.setViewport({ width, height, deviceScaleFactor: 2 })
  await page.goto(url, { waitUntil: [`load`, `networkidle0`] })
  // wait for full-size images to fade in
  await page.waitFor(1000);

  const screenshot = await page.screenshot()
  const up = await s3PutObject(key, screenshot)

  await page.close()

  let expires

  if (up && up.Expiration) {
    expires = getDateFromExpiration(up.Expiration)
  }

  return { url: screenshotUrl, expires }
}
Exemplo n.º 5
0
    return _regenerator2.default.wrap(function _callee$(_context) {
      while (1) {
        switch (_context.prev = _context.next) {
          case 0:
            program = (0, _extends3.default)({}, args, {
              // Fix program directory path for windows env.
              directory: slash(args.directory)
            });


            store.dispatch({
              type: `SET_PROGRAM`,
              payload: program
            });

            // Delete html files from the public directory as we don't want deleted
            // pages from previous builds to stick around.
            activity = report.activityTimer(`delete html files from previous builds`);

            activity.start();
            _context.next = 6;
            return del([`public/*.html`, `public/**/*.html`, `!public/static`, `!public/static/**/*.html`]);

          case 6:
            activity.end();

            // Try opening the site's gatsby-config.js file.
            activity = report.activityTimer(`open and validate gatsby-config.js`);
            activity.start();
            config = void 0;

            try {
              // $FlowFixMe
              config = preferDefault(require(`${program.directory}/gatsby-config`));
            } catch (err) {
              if (!testRequireError(`${program.directory}/gatsby-config`, err)) {
                report.error(`Could not load gatsby-config`, err);
                process.exit(1);
              }
            }

            store.dispatch({
              type: `SET_SITE_CONFIG`,
              payload: config
            });

            activity.end();

            _context.next = 15;
            return loadPlugins(config);

          case 15:
            flattenedPlugins = _context.sent;


            // Check if any plugins have been updated since our last run. If so
            // we delete the cache is there's likely been changes
            // since the previous run.
            //
            // We do this by creating a hash of all the version numbers of installed
            // plugins, the site's package.json, gatsby-config.js, and gatsby-node.js.
            // The last, gatsby-node.js, is important as many gatsby sites put important
            // logic in there e.g. generating slugs for custom pages.
            pluginVersions = flattenedPlugins.map(function (p) {
              return p.version;
            });
            _context.next = 19;
            return Promise.all([md5File(`package.json`), Promise.resolve(md5File(`${program.directory}/gatsby-config.js`).catch(function () {})), // ignore as this file isn't required),
            Promise.resolve(md5File(`${program.directory}/gatsby-node.js`).catch(function () {}))] // ignore as this file isn't required),
            );

          case 19:
            hashes = _context.sent;
            pluginsHash = crypto.createHash(`md5`).update(JSON.stringify(pluginVersions.concat(hashes))).digest(`hex`);
            state = store.getState();
            oldPluginsHash = state && state.status ? state.status.PLUGINS_HASH : ``;

            // Check if anything has changed. If it has, delete the site's .cache
            // directory and tell reducers to empty themselves.
            //
            // Also if the hash isn't there, then delete things just in case something
            // is weird.

            if (oldPluginsHash && pluginsHash !== oldPluginsHash) {
              report.info(report.stripIndent`
      One or more of your plugins have changed since the last time you ran Gatsby. As
      a precaution, we're deleting your site's cache to ensure there's not any stale
      data
    `);
            }

            if (!(!oldPluginsHash || pluginsHash !== oldPluginsHash)) {
              _context.next = 34;
              break;
            }

            _context.prev = 25;
            _context.next = 28;
            return fs.remove(`${program.directory}/.cache`);

          case 28:
            _context.next = 33;
            break;

          case 30:
            _context.prev = 30;
            _context.t0 = _context["catch"](25);

            report.error(`Failed to remove .cache files.`, _context.t0);

          case 33:
            // Tell reducers to delete their data (the store will already have
            // been loaded from the file system cache).
            store.dispatch({
              type: `DELETE_CACHE`
            });

          case 34:

            // Update the store with the new plugins hash.
            store.dispatch({
              type: `UPDATE_PLUGINS_HASH`,
              payload: pluginsHash
            });

            // Now that we know the .cache directory is safe, initialize the cache
            // directory.
            initCache();

            // Ensure the public/static directory is created.
            _context.next = 38;
            return fs.ensureDirSync(`${program.directory}/public/static`);

          case 38:

            // Copy our site files to the root of the site.
            activity = report.activityTimer(`copy gatsby files`);
            activity.start();
            srcDir = `${__dirname}/../../cache-dir`;
            siteDir = `${program.directory}/.cache`;
            tryRequire = `${__dirname}/../utils/test-require-error.js`;
            _context.prev = 43;
            _context.next = 46;
            return fs.copy(srcDir, siteDir, { clobber: true });

          case 46:
            _context.next = 48;
            return fs.copy(tryRequire, `${siteDir}/test-require-error.js`, {
              clobber: true
            });

          case 48:
            _context.next = 50;
            return fs.ensureDirSync(`${program.directory}/.cache/json`);

          case 50:
            _context.next = 52;
            return fs.ensureDirSync(`${program.directory}/.cache/layouts`);

          case 52:
            _context.next = 54;
            return fs.emptyDir(`${program.directory}/.cache/fragments`);

          case 54:
            _context.next = 59;
            break;

          case 56:
            _context.prev = 56;
            _context.t1 = _context["catch"](43);

            report.panic(`Unable to copy site files to .cache`, _context.t1);

          case 59:

            // Find plugins which implement gatsby-browser and gatsby-ssr and write
            // out api-runners for them.
            hasAPIFile = function hasAPIFile(env, plugin) {
              return (
                // TODO make this async...
                glob.sync(`${plugin.resolve}/gatsby-${env}*`)[0]
              );
            };

            ssrPlugins = _.filter(flattenedPlugins.map(function (plugin) {
              return {
                resolve: hasAPIFile(`ssr`, plugin),
                options: plugin.pluginOptions
              };
            }), function (plugin) {
              return plugin.resolve;
            });
            browserPlugins = _.filter(flattenedPlugins.map(function (plugin) {
              return {
                resolve: hasAPIFile(`browser`, plugin),
                options: plugin.pluginOptions
              };
            }), function (plugin) {
              return plugin.resolve;
            });
            browserAPIRunner = ``;


            try {
              browserAPIRunner = fs.readFileSync(`${siteDir}/api-runner-browser.js`, `utf-8`);
            } catch (err) {
              report.panic(`Failed to read ${siteDir}/api-runner-browser.js`, err);
            }

            browserPluginsRequires = browserPlugins.map(function (plugin) {
              return `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`;
            }).join(`,`);


            browserAPIRunner = `var plugins = [${browserPluginsRequires}]\n${browserAPIRunner}`;

            sSRAPIRunner = ``;


            try {
              sSRAPIRunner = fs.readFileSync(`${siteDir}/api-runner-ssr.js`, `utf-8`);
            } catch (err) {
              report.panic(`Failed to read ${siteDir}/api-runner-ssr.js`, err);
            }

            ssrPluginsRequires = ssrPlugins.map(function (plugin) {
              return `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`;
            }).join(`,`);

            sSRAPIRunner = `var plugins = [${ssrPluginsRequires}]\n${sSRAPIRunner}`;

            fs.writeFileSync(`${siteDir}/api-runner-browser.js`, browserAPIRunner, `utf-8`);
            fs.writeFileSync(`${siteDir}/api-runner-ssr.js`, sSRAPIRunner, `utf-8`);

            activity.end();
            /**
             * Start the main bootstrap processes.
             */

            // onPreBootstrap
            activity = report.activityTimer(`onPreBootstrap`);
            activity.start();
            _context.next = 77;
            return apiRunnerNode(`onPreBootstrap`);

          case 77:
            activity.end();

            // Source nodes
            activity = report.activityTimer(`source and transform nodes`);
            activity.start();
            _context.next = 82;
            return require(`../utils/source-nodes`)();

          case 82:
            activity.end();

            // Create Schema.
            activity = report.activityTimer(`building schema`);
            activity.start();
            _context.next = 87;
            return require(`../schema`)();

          case 87:
            activity.end();

            // Collect resolvable extensions and attach to program.
            extensions = [`.js`, `.jsx`];
            // Change to this being an action and plugins implement `onPreBootstrap`
            // for adding extensions.

            _context.next = 91;
            return apiRunnerNode(`resolvableExtensions`, {
              traceId: `initial-resolvableExtensions`
            });

          case 91:
            apiResults = _context.sent;


            store.dispatch({
              type: `SET_PROGRAM_EXTENSIONS`,
              payload: _.flattenDeep([extensions, apiResults])
            });

            graphqlRunner = function graphqlRunner(query) {
              var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

              var schema = store.getState().schema;
              return graphql(schema, query, context, context, context);
            };

            // Collect layouts.


            activity = report.activityTimer(`createLayouts`);
            activity.start();
            _context.next = 98;
            return apiRunnerNode(`createLayouts`, {
              graphql: graphqlRunner,
              traceId: `initial-createLayouts`,
              waitForCascadingActions: true
            });

          case 98:
            activity.end();

            // Collect pages.
            activity = report.activityTimer(`createPages`);
            activity.start();
            _context.next = 103;
            return apiRunnerNode(`createPages`, {
              graphql: graphqlRunner,
              traceId: `initial-createPages`,
              waitForCascadingActions: true
            });

          case 103:
            activity.end();

            // A variant on createPages for plugins that want to
            // have full control over adding/removing pages. The normal
            // "createPages" API is called every time (during development)
            // that data changes.
            activity = report.activityTimer(`createPagesStatefully`);
            activity.start();
            _context.next = 108;
            return apiRunnerNode(`createPagesStatefully`, {
              graphql: graphqlRunner,
              traceId: `initial-createPagesStatefully`,
              waitForCascadingActions: true
            });

          case 108:
            activity.end();

            activity = report.activityTimer(`onPreExtractQueries`);
            activity.start();
            _context.next = 113;
            return apiRunnerNode(`onPreExtractQueries`);

          case 113:
            activity.end();

            // Update Schema for SitePage.
            activity = report.activityTimer(`update schema`);
            activity.start();
            _context.next = 118;
            return require(`../schema`)();

          case 118:
            activity.end();

            // Extract queries
            activity = report.activityTimer(`extract queries from components`);
            activity.start();
            _context.next = 123;
            return extractQueries();

          case 123:
            activity.end();

            // Start the createPages hot reloader.
            if (process.env.NODE_ENV !== `production`) {
              require(`./page-hot-reloader`)(graphqlRunner);
            }

            // Run queries
            activity = report.activityTimer(`run graphql queries`);
            activity.start();
            _context.next = 129;
            return runQueries();

          case 129:
            activity.end();

            // Write out files.
            activity = report.activityTimer(`write out page data`);
            activity.start();
            _context.next = 134;
            return writePages();

          case 134:
            activity.end();

            // Write out redirects.
            activity = report.activityTimer(`write out redirect data`);
            activity.start();
            _context.next = 139;
            return writeRedirects();

          case 139:
            activity.end();

            checkJobsDone = _.debounce(function (resolve) {
              var state = store.getState();
              if (state.jobs.active.length === 0) {
                report.log(``);
                report.info(`bootstrap finished - ${process.uptime()} s`);
                report.log(``);

                // onPostBootstrap
                activity = report.activityTimer(`onPostBootstrap`);
                activity.start();
                apiRunnerNode(`onPostBootstrap`).then(function () {
                  activity.end();
                  resolve({ graphqlRunner });
                });
              }
            }, 100);

            if (!(store.getState().jobs.active.length === 0)) {
              _context.next = 153;
              break;
            }

            // onPostBootstrap
            activity = report.activityTimer(`onPostBootstrap`);
            activity.start();
            _context.next = 146;
            return apiRunnerNode(`onPostBootstrap`);

          case 146:
            activity.end();

            report.log(``);
            report.info(`bootstrap finished - ${process.uptime()} s`);
            report.log(``);
            return _context.abrupt("return", { graphqlRunner });

          case 153:
            return _context.abrupt("return", new Promise(function (resolve) {
              // Wait until all side effect jobs are finished.
              emitter.on(`END_JOB`, function () {
                return checkJobsDone(resolve);
              });
            }));

          case 154:
          case "end":
            return _context.stop();
        }
      }
    }, _callee, undefined, [[25, 30], [43, 56]]);
Exemplo n.º 6
0
// Regenerate the .cache page manifests (pages.json, sync-requires.js,
// async-requires.js, data.json) from the pages currently in the redux store.
// Skips the rewrite when the page→component mapping is unchanged since the
// previous call (md5 comparison via the module-level `lastHash`).
const writePages = async () => {
  bootstrapFinished = true
  let { program, jsonDataPaths, pages } = store.getState()
  // `pages` comes out of the store as a Map; flatten to an array of pages.
  pages = [...pages.values()]

  const pagesComponentDependencies = {}

  // Write out pages.json
  let pagesData = []
  pages.forEach(({ path, matchPath, componentChunkName, jsonName }) => {
    const pageComponentsChunkNames = {
      componentChunkName,
    }

    // Only track per-path chunk dependencies while running `gatsby develop`
    // (program._[0] is the CLI sub-command).
    if (program._[0] === `develop`) {
      pagesComponentDependencies[path] = pageComponentsChunkNames
    }

    pagesData.push({
      ...pageComponentsChunkNames,
      jsonName,
      path,
      matchPath,
    })
  })

  pagesData = _(pagesData)
    // Ensure pages keep the same sorting through builds.
    // Pages without matchPath come first, then pages with matchPath,
    // where more specific patterns come before less specific patterns.
    // This ensures explicit routes will match before general.
    // Specificity is inferred from number of path segments.
    .sortBy(
      p =>
        `${p.matchPath ? 9999 - p.matchPath.split(`/`).length : `0000`}${
          p.path
        }`
    )
    .value()
  // Hash only the path→chunk mapping; other page fields changing does not
  // require rewriting the manifests.
  const newHash = crypto
    .createHash(`md5`)
    .update(JSON.stringify(pagesComponentDependencies))
    .digest(`hex`)

  if (newHash === lastHash) {
    // components didn't change - no need to rewrite pages.json
    return Promise.resolve()
  }

  lastHash = newHash

  // Get list of components, and json files.
  let components = []
  pages.forEach(p => {
    components.push({
      componentChunkName: p.componentChunkName,
      component: p.component,
    })
  })

  // Many pages can share a component — emit each component exactly once.
  components = _.uniqBy(components, c => c.componentChunkName)

  // Create file with sync requires of components/json files.
  let syncRequires = `const { hot } = require("react-hot-loader/root")

// prefer default export if available
const preferDefault = m => m && m.default || m
\n\n`
  syncRequires += `exports.components = {\n${components
    .map(
      c =>
        `  "${c.componentChunkName}": hot(preferDefault(require("${joinPath(
          c.component
        )}")))`
    )
    .join(`,\n`)}
}\n\n`

  // Create file with async requires of components/json files.
  let asyncRequires = `// prefer default export if available
const preferDefault = m => m && m.default || m
\n`
  asyncRequires += `exports.components = {\n${components
    .map(
      c =>
        `  "${c.componentChunkName}": () => import("${joinPath(
          c.component
        )}" /* webpackChunkName: "${c.componentChunkName}" */)`
    )
    .join(`,\n`)}
}\n\n`

  asyncRequires += `exports.data = () => import(/* webpackChunkName: "pages-manifest" */ "${joinPath(
    program.directory,
    `.cache`,
    `data.json`
  )}")\n\n`

  // Write to a timestamped temp file first, then move it over the
  // destination, so readers never observe a partially-written manifest.
  const writeAndMove = (file, data) => {
    const destination = joinPath(program.directory, `.cache`, file)
    const tmp = `${destination}.${Date.now()}`
    return fs
      .writeFile(tmp, data)
      .then(() => fs.move(tmp, destination, { overwrite: true }))
  }

  const result = await Promise.all([
    writeAndMove(`pages.json`, JSON.stringify(pagesData, null, 4)),
    writeAndMove(`sync-requires.js`, syncRequires),
    writeAndMove(`async-requires.js`, asyncRequires),
    writeAndMove(
      `data.json`,
      JSON.stringify({
        pages: pagesData,
        // Sort dataPaths by keys to ensure keeping the same
        // sorting through builds
        dataPaths: _(jsonDataPaths)
          .toPairs()
          .sortBy(0)
          .fromPairs()
          .value(),
      })
    ),
  ])

  return result
}
Exemplo n.º 7
0
module.exports = async function generateSqip(options) {
  const {
    cache,
    absolutePath,
    numberOfPrimitives,
    blur,
    mode,
    cacheDir,
  } = options

  debug({ options })

  const { name } = parse(absolutePath)

  const sqipOptions = {
    numberOfPrimitives,
    blur,
    mode,
  }

  const optionsHash = crypto
    .createHash(`md5`)
    .update(JSON.stringify(sqipOptions))
    .digest(`hex`)

  const cacheKey = `sqip-${name}-${optionsHash}`
  const cachePath = resolve(cacheDir, `${name}-${optionsHash}.svg`)
  let primitiveData = await cache.get(cacheKey)

  debug({ primitiveData })

  if (!primitiveData) {
    let svg
    if (await exists(cachePath)) {
      const svgBuffer = await readFile(cachePath)
      svg = svgBuffer.toString()
    } else {
      debug(`generate sqip for ${name}`)
      const result = await queue.add(
        async () =>
          new Promise((resolve, reject) => {
            try {
              const result = sqip({
                filename: absolutePath,
                ...sqipOptions,
              })
              resolve(result)
            } catch (error) {
              reject(error)
            }
          })
      )

      svg = result.final_svg

      await writeFile(cachePath, svg)
    }

    primitiveData = {
      svg,
      dataURI: svgToMiniDataURI(svg),
    }

    await cache.set(cacheKey, primitiveData)
  }

  return primitiveData
}
Exemplo n.º 8
0
// Gatsby `onCreateNode` API: parse JS/TS files with Babylon, pull out a
// `frontmatter` value (either an `exports.frontmatter = {...}` assignment or
// an `export const frontmatter = {...}` declaration), and create a
// JavascriptFrontmatter child node carrying the extracted data. Any parse
// error is attached to the node rather than thrown.
async function onCreateNode({ node, getNode, actions, loadNodeContent }) {
  const { createNode, createParentChildLink } = actions
  const fileExtsToProcess = [`js`, `jsx`, `ts`, `tsx`]

  // This only processes JavaScript and TypeScript files.
  if (!_.includes(fileExtsToProcess, node.extension)) {
    return
  }

  const code = await loadNodeContent(node)
  // Permissive Babylon options so most dialects (JSX, Flow, various
  // proposal syntaxes) parse without error.
  const options = {
    sourceType: `module`,
    allowImportExportEverywhere: true,
    plugins: [
      `jsx`,
      `doExpressions`,
      `objectRestSpread`,
      [
        `decorators`,
        {
          decoratorsBeforeExport: true,
        },
      ],
      `classProperties`,
      `exportExtensions`,
      `asyncGenerators`,
      `functionBind`,
      `functionSent`,
      `dynamicImport`,
      `flow`,
    ],
  }

  let exportsData, frontmatter, error
  try {
    const ast = babylon.parse(code, options)

    // Recursively convert a literal AST node to a plain JS value.
    // Template literals: join the static text parts, dropping any
    // interpolations. Objects/arrays recurse; everything else uses `.value`.
    const parseData = function parseData(node) {
      let value

      if (node.type === `TemplateLiteral`) {
        // Experimental basic support for template literals:
        // Extract and join any text content; ignore interpolations
        value = node.quasis.map(quasi => quasi.value.cooked).join(``)
      } else if (node.type === `ObjectExpression`) {
        value = {}
        node.properties.forEach(elem => {
          value[elem.key.name] = parseData(elem.value)
        })
      } else if (node.type === `ArrayExpression`) {
        value = node.elements.map(elem => parseData(elem))
      } else {
        value = node.value
      }

      return value
    }

    frontmatter = {}
    error = false
    traverse(ast, {
      // Matches `something.frontmatter = { ... }` (e.g. exports.frontmatter).
      AssignmentExpression: function AssignmentExpression(astPath) {
        if (
          astPath.node.left.type === `MemberExpression` &&
          astPath.node.left.property.name === `frontmatter`
        ) {
          astPath.node.right.properties.forEach(node => {
            frontmatter[node.key.name] = parseData(node.value)
          })
        }
      },
      // Matches `export const frontmatter = { ... }`.
      ExportNamedDeclaration: function ExportNamedDeclaration(astPath) {
        const { declaration } = astPath.node
        if (declaration && declaration.type === `VariableDeclaration`) {
          const dataVariableDeclarator = _.find(
            declaration.declarations,
            d => d.id.name === `frontmatter`
          )

          if (dataVariableDeclarator && dataVariableDeclarator.init) {
            dataVariableDeclarator.init.properties.forEach(node => {
              frontmatter[node.key.name] = parseData(node.value)
            })
          }
        }
      },
    })
  } catch (e) {
    // stick the error on the query so the user can
    // react to an error as they see fit
    error = {
      err: true,
      code: e.code,
      message: e.message,
      stack: e.stack,
    }
  } finally {
    // only create node if frontmatter is not empty
    if (!_.isEmpty(frontmatter)) {
      exportsData = {
        ...frontmatter,
        error: error,
      }

      // Digest the parent file node, so the child is refreshed whenever
      // the underlying file node changes.
      const objStr = JSON.stringify(node)
      const contentDigest = crypto
        .createHash(`md5`)
        .update(objStr)
        .digest(`hex`)

      const nodeData = {
        id: `${node.id} >>> JavascriptFrontmatter`,
        children: [],
        parent: node.id,
        node: { ...node },
        internal: {
          contentDigest,
          type: `JavascriptFrontmatter`,
        },
      }

      nodeData.frontmatter = { ...exportsData }

      if (node.internal.type === `File`) {
        nodeData.fileAbsolutePath = node.absolutePath
      }

      createNode(nodeData)
      createParentChildLink({ parent: node, child: nodeData })
    }
  }
}
Exemplo n.º 9
0
  // Render responsive-image HTML for a Contentful-hosted image node and cache
  // the markup. Non-Contentful URLs call resolve() and bail out. Returns the
  // raw HTML string (from cache when available).
  const generateImagesAndUpdateNode = async function(node, resolve) {
    // Ignore if it is not contentful image

    if (node.url.indexOf(`images.ctfassets.net`) === -1) {
      return resolve()
    }

    const srcSplit = node.url.split(`/`)
    const fileName = srcSplit[srcSplit.length - 1]
    // NOTE(review): _.defaults mutates its first argument, so pluginOptions
    // is filled in with the default values on first use — confirm intended.
    const options = _.defaults(pluginOptions, defaults)

    // Include the options hash in the cache key so changed plugin options
    // invalidate previously rendered HTML.
    const optionsHash = crypto
      .createHash(`md5`)
      .update(JSON.stringify(options))
      .digest(`hex`)

    const cacheKey = `remark-images-ctf-${fileName}-${optionsHash}`
    let cachedRawHTML = await cache.get(cacheKey)

    if (cachedRawHTML) {
      return cachedRawHTML
    }
    const metaReader = sharp()

    // Stream the image just far enough for sharp to read its metadata,
    // then drop the connection.
    const response = await axios({
      method: `GET`,
      url: `https:${node.url}`, // for some reason there is a './' prefix
      responseType: `stream`,
    })

    response.data.pipe(metaReader)

    const metadata = await metaReader.metadata()

    response.data.destroy()

    const responsiveSizesResult = await buildResponsiveSizes({
      metadata,
      imageUrl: `https:${node.url}`,
      options,
    })
    // Calculate the paddingBottom %
    const ratio = `${(1 / responsiveSizesResult.aspectRatio) * 100}%`

    // BUGFIX: was `https${node.url}` (missing colon), which produced an
    // invalid src like "https//images.ctfassets.net/…".
    const fallbackSrc = `https:${node.url}`
    const srcSet = responsiveSizesResult.srcSet
    const presentationWidth = responsiveSizesResult.presentationWidth

    // Generate default alt tag
    const originalImg = node.url
    const fileNameNoExt = fileName.replace(/\.[^/.]+$/, ``)
    const defaultAlt = fileNameNoExt.replace(/[^A-Z0-9]/gi, ` `)

    // Construct new image node w/ aspect ratio placeholder
    let rawHTML = `
  <span
    class="gatsby-resp-image-wrapper"
    style="position: relative; display: block; ${
      options.wrapperStyle
    }; max-width: ${presentationWidth}px; margin-left: auto; margin-right: auto;"
  >
    <span
      class="gatsby-resp-image-background-image"
      style="padding-bottom: ${ratio}; position: relative; bottom: 0; left: 0; background-image: url('${
      responsiveSizesResult.base64
    }'); background-size: cover; display: block;"
    >
      <img
        class="gatsby-resp-image-image"
        style="width: 100%; height: 100%; margin: 0; vertical-align: middle; position: absolute; top: 0; left: 0; box-shadow: inset 0px 0px 0px 400px ${
          options.backgroundColor
        };"
        alt="${node.alt ? node.alt : defaultAlt}"
        title="${node.title ? node.title : ``}"
        src="${fallbackSrc}"
        srcset="${srcSet}"
        sizes="${responsiveSizesResult.sizes}"
      />
    </span>
  </span>
  `
    // Make linking to original image optional.
    if (options.linkImagesToOriginal) {
      rawHTML = `
<a
  class="gatsby-resp-image-link"
  href="${originalImg}"
  style="display: block"
  target="_blank"
  rel="noopener"
>
${rawHTML}
</a>
  `
    }

    // Wrap in figure and use title as caption

    if (options.showCaptions && node.title) {
      rawHTML = `
<figure class="gatsby-resp-image-figure">
${rawHTML}
<figcaption class="gatsby-resp-image-figcaption">${node.title}</figcaption>
</figure>`
    }
    await cache.set(cacheKey, rawHTML)
    return rawHTML
  }
Exemplo n.º 10
0
exports.createFileNode = async (pathToFile, pluginOptions = {}) => {
  const slashed = slash(pathToFile)
  const parsedSlashed = path.parse(slashed)
  const slashedFile = {
    ...parsedSlashed,
    absolutePath: slashed,
    // Useful for limiting graphql query with certain parent directory
    relativeDirectory: path.relative(
      pluginOptions.path || process.cwd(),
      parsedSlashed.dir
    ),
  }

  const stats = await fs.stat(slashedFile.absolutePath)
  let internal
  if (stats.isDirectory()) {
    const contentDigest = crypto
      .createHash(`md5`)
      .update(
        JSON.stringify({ stats: stats, absolutePath: slashedFile.absolutePath })
      )
      .digest(`hex`)
    internal = {
      contentDigest,
      type: `Directory`,
    }
  } else {
    const contentDigest = await md5File(slashedFile.absolutePath)
    internal = {
      contentDigest,
      mediaType: mime.lookup(slashedFile.ext),
      type: `File`,
    }
  }

  // console.log('createFileNode:stat', slashedFile.absolutePath)
  // Stringify date objects.
  return JSON.parse(
    JSON.stringify({
      // Don't actually make the File id the absolute path as otherwise
      // people will use the id for that and ids shouldn't be treated as
      // useful information.
      id: createId(pathToFile),
      children: [],
      parent: `___SOURCE___`,
      internal,
      sourceInstanceName: pluginOptions.name || `__PROGRAMATTIC__`,
      absolutePath: slashedFile.absolutePath,
      relativePath: slash(
        path.relative(
          pluginOptions.path || process.cwd(),
          slashedFile.absolutePath
        )
      ),
      extension: slashedFile.ext.slice(1).toLowerCase(),
      size: stats.size,
      prettySize: prettyBytes(stats.size),
      modifiedTime: stats.mtime,
      accessTime: stats.atime,
      changeTime: stats.ctime,
      birthTime: stats.birthtime,
      ...slashedFile,
      ...stats,
    })
  )
}
Exemplo n.º 11
0
// md5 hex digest of an object's JSON serialization; used as a node
// contentDigest so downstream consumers can detect content changes.
const createContentDigest = obj => {
  const hash = crypto.createHash(`md5`)
  hash.update(JSON.stringify(obj))
  return hash.digest(`hex`)
}
Exemplo n.º 12
0
  // For each top story from the HN GraphQL response, create an HNStory node
  // plus a tree of HNComment child nodes (one level per `kids` nesting).
  result.data.data.hn.topStories.forEach((story, i) => {
    const storyStr = JSON.stringify(story)

    // Ask HN, Polls, etc. don't have urls.
    // For those that do, HN displays just the bare domain.
    let domain
    if (story.url) {
      const parsedUrl = url.parse(story.url)
      const splitHost = parsedUrl.host.split(`.`)
      if (splitHost.length > 2) {
        // Drop the first host label (e.g. "www."). NOTE(review): only one
        // label is stripped, so "a.b.example.com" → "b.example.com".
        domain = splitHost.slice(1).join(`.`)
      } else {
        domain = splitHost.join(`.`)
      }
    }

    // Separate the comment tree (`kids`) from the story itself; default to
    // an empty list so the recursion below always has an array.
    let kids
    kids = _.pick(story, `kids`)
    if (!kids.kids) {
      kids.kids = []
    }
    const kidLessStory = _.omit(story, `kids`)
    const childIds = kids.kids.map(k => createNodeId(k.id))

    const storyNode = {
      ...kidLessStory,
      id: createNodeId(kidLessStory.id),
      children: childIds,
      parent: null,
      content: storyStr,
      internal: {
        type: `HNStory`,
      },
      domain,
      order: i + 1,
    }

    // Just store the user id
    storyNode.by = storyNode.by.id

    // Get content digest of node.
    const contentDigest = crypto
      .createHash(`md5`)
      .update(JSON.stringify(storyNode))
      .digest(`hex`)

    storyNode.internal.contentDigest = contentDigest
    createNode(storyNode)

    // Recursively create comment nodes.
    const createCommentNodes = (comments, parent, depth = 0) => {
      comments.forEach((comment, i) => {
        if (!comment.kids) {
          comment.kids = []
        }
        let commentChildIds = comment.kids.map(k => createNodeId(k.id))
        let commentNode = {
          ..._.omit(comment, `kids`),
          id: createNodeId(comment.id),
          children: commentChildIds,
          parent,
          internal: {
            type: `HNComment`,
          },
          order: i + 1,
        }

        // Same flattening as the story node: keep only the author's id.
        commentNode.by = commentNode.by.id
        const nodeStr = JSON.stringify(commentNode)

        // Get content digest of comment node.
        const contentDigest = crypto
          .createHash(`md5`)
          .update(nodeStr)
          .digest(`hex`)

        commentNode.internal.contentDigest = contentDigest
        commentNode.internal.content = nodeStr

        createNode(commentNode)

        // Recurse into nested replies, parented to this comment.
        if (comment.kids.length > 0) {
          createCommentNodes(comment.kids, commentNode.id, depth + 1)
        }
      })
    }

    createCommentNodes(kids.kids, storyNode.id)
  })
      return _regenerator2.default.wrap(function _callee3$(_context3) {
        while (1) {
          switch (_context3.prev = _context3.next) {
            case 0:
              text = void 0;
              _context3.prev = 1;
              _context3.next = 4;
              return fs.readFile(file, `utf8`);

            case 4:
              text = _context3.sent;
              _context3.next = 11;
              break;

            case 7:
              _context3.prev = 7;
              _context3.t0 = _context3["catch"](1);

              report.error(`There was a problem reading the file: ${file}`, _context3.t0);
              return _context3.abrupt("return", null);

            case 11:
              if (!(text.indexOf(`graphql`) === -1)) {
                _context3.next = 13;
                break;
              }

              return _context3.abrupt("return", null);

            case 13:
              hash = crypto.createHash(`md5`).update(file).update(text).digest(`hex`);
              _context3.prev = 14;
              _context3.t1 = cache[hash];

              if (_context3.t1) {
                _context3.next = 20;
                break;
              }

              _context3.next = 19;
              return findGraphQLTags(file, text);

            case 19:
              _context3.t1 = cache[hash] = _context3.sent;

            case 20:
              astDefinitions = _context3.t1;
              return _context3.abrupt("return", astDefinitions.length ? {
                kind: `Document`,
                definitions: astDefinitions
              } : null);

            case 24:
              _context3.prev = 24;
              _context3.t2 = _context3["catch"](14);

              report.error(`There was a problem parsing the GraphQL query in file: ${file}`, _context3.t2);
              return _context3.abrupt("return", null);

            case 28:
            case "end":
              return _context3.stop();
          }
        }
      }, _callee3, this, [[1, 7], [14, 24]]);
Exemplo n.º 14
0
module.exports = async (args: BootstrapArgs) => {
  const program = {
    ...args,
    // Fix program directory path for windows env.
    directory: slash(args.directory),
  }

  store.dispatch({
    type: `SET_PROGRAM`,
    payload: program,
  })

  // Delete html files from the public directory as we don't want deleted
  // pages from previous builds to stick around.
  let activity = report.activityTimer(`delete html files from previous builds`)
  activity.start()
  await del([
    `public/*.html`,
    `public/**/*.html`,
    `!public/static`,
    `!public/static/**/*.html`,
  ])
  activity.end()

  // Try opening the site's gatsby-config.js file.
  activity = report.activityTimer(`open and validate gatsby-config.js`)
  activity.start()
  let config
  try {
    // $FlowFixMe
    config = preferDefault(require(`${program.directory}/gatsby-config`))
  } catch (err) {
    if (!testRequireError(`${program.directory}/gatsby-config`, err)) {
      report.error(`Could not load gatsby-config`, err)
      process.exit(1)
    }
  }

  store.dispatch({
    type: `SET_SITE_CONFIG`,
    payload: config,
  })

  activity.end()

  const flattenedPlugins = await loadPlugins(config)

  // Check if any plugins have been updated since our last run. If so
  // we delete the cache is there's likely been changes
  // since the previous run.
  //
  // We do this by creating a hash of all the version numbers of installed
  // plugins, the site's package.json, gatsby-config.js, and gatsby-node.js.
  // The last, gatsby-node.js, is important as many gatsby sites put important
  // logic in there e.g. generating slugs for custom pages.
  const pluginVersions = flattenedPlugins.map(p => p.version)
  const hashes = await Promise.all([
    md5File(`package.json`),
    Promise.resolve(
      md5File(`${program.directory}/gatsby-config.js`).catch(() => {})
    ), // ignore as this file isn't required),
    Promise.resolve(
      md5File(`${program.directory}/gatsby-node.js`).catch(() => {})
    ), // ignore as this file isn't required),
  ])
  const pluginsHash = crypto
    .createHash(`md5`)
    .update(JSON.stringify(pluginVersions.concat(hashes)))
    .digest(`hex`)
  let state = store.getState()
  const oldPluginsHash = state && state.status ? state.status.PLUGINS_HASH : ``

  // Check if anything has changed. If it has, delete the site's .cache
  // directory and tell reducers to empty themselves.
  //
  // Also if the hash isn't there, then delete things just in case something
  // is weird.
  if (oldPluginsHash && pluginsHash !== oldPluginsHash) {
    report.info(report.stripIndent`
      One or more of your plugins have changed since the last time you ran Gatsby. As
      a precaution, we're deleting your site's cache to ensure there's not any stale
      data
    `)
  }

  if (!oldPluginsHash || pluginsHash !== oldPluginsHash) {
    try {
      await fs.remove(`${program.directory}/.cache`)
    } catch (e) {
      report.error(`Failed to remove .cache files.`, e)
    }
    // Tell reducers to delete their data (the store will already have
    // been loaded from the file system cache).
    store.dispatch({
      type: `DELETE_CACHE`,
    })
  }

  // Update the store with the new plugins hash.
  store.dispatch({
    type: `UPDATE_PLUGINS_HASH`,
    payload: pluginsHash,
  })

  // Now that we know the .cache directory is safe, initialize the cache
  // directory.
  initCache()

  // Ensure the public/static directory is created.
  await fs.ensureDirSync(`${program.directory}/public/static`)

  // Copy our site files to the root of the site.
  activity = report.activityTimer(`copy gatsby files`)
  activity.start()
  const srcDir = `${__dirname}/../../cache-dir`
  const siteDir = `${program.directory}/.cache`
  const tryRequire = `${__dirname}/../utils/test-require-error.js`
  try {
    await fs.copy(srcDir, siteDir, { clobber: true })
    await fs.copy(tryRequire, `${siteDir}/test-require-error.js`, {
      clobber: true,
    })
    await fs.ensureDirSync(`${program.directory}/.cache/json`)
    await fs.ensureDirSync(`${program.directory}/.cache/layouts`)

    // Ensure .cache/fragments exists and is empty. We want fragments to be
    // added on every run in response to data as fragments can only be added if
    // the data used to create the schema they're dependent on is available.
    await fs.emptyDir(`${program.directory}/.cache/fragments`)
  } catch (err) {
    report.panic(`Unable to copy site files to .cache`, err)
  }

  // Find plugins which implement gatsby-browser and gatsby-ssr and write
  // out api-runners for them.
  const hasAPIFile = (env, plugin) =>
    // TODO make this async...
    glob.sync(`${plugin.resolve}/gatsby-${env}*`)[0]

  const ssrPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`ssr`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )
  const browserPlugins = _.filter(
    flattenedPlugins.map(plugin => {
      return {
        resolve: hasAPIFile(`browser`, plugin),
        options: plugin.pluginOptions,
      }
    }),
    plugin => plugin.resolve
  )

  let browserAPIRunner = ``

  try {
    browserAPIRunner = fs.readFileSync(
      `${siteDir}/api-runner-browser.js`,
      `utf-8`
    )
  } catch (err) {
    report.panic(`Failed to read ${siteDir}/api-runner-browser.js`, err)
  }

  const browserPluginsRequires = browserPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)

  browserAPIRunner = `var plugins = [${browserPluginsRequires}]\n${browserAPIRunner}`

  let sSRAPIRunner = ``

  try {
    sSRAPIRunner = fs.readFileSync(`${siteDir}/api-runner-ssr.js`, `utf-8`)
  } catch (err) {
    report.panic(`Failed to read ${siteDir}/api-runner-ssr.js`, err)
  }

  const ssrPluginsRequires = ssrPlugins
    .map(
      plugin =>
        `{
      plugin: require('${plugin.resolve}'),
      options: ${JSON.stringify(plugin.options)},
    }`
    )
    .join(`,`)
  sSRAPIRunner = `var plugins = [${ssrPluginsRequires}]\n${sSRAPIRunner}`

  fs.writeFileSync(
    `${siteDir}/api-runner-browser.js`,
    browserAPIRunner,
    `utf-8`
  )
  fs.writeFileSync(`${siteDir}/api-runner-ssr.js`, sSRAPIRunner, `utf-8`)

  activity.end()
  /**
   * Start the main bootstrap processes.
   */

  // onPreBootstrap
  activity = report.activityTimer(`onPreBootstrap`)
  activity.start()
  await apiRunnerNode(`onPreBootstrap`)
  activity.end()

  // Source nodes
  activity = report.activityTimer(`source and transform nodes`)
  activity.start()
  await require(`../utils/source-nodes`)()
  activity.end()

  // Create Schema.
  activity = report.activityTimer(`building schema`)
  activity.start()
  await require(`../schema`)()
  activity.end()

  // Collect resolvable extensions and attach to program.
  const extensions = [`.js`, `.jsx`]
  // Change to this being an action and plugins implement `onPreBootstrap`
  // for adding extensions.
  const apiResults = await apiRunnerNode(`resolvableExtensions`, {
    traceId: `initial-resolvableExtensions`,
  })

  store.dispatch({
    type: `SET_PROGRAM_EXTENSIONS`,
    payload: _.flattenDeep([extensions, apiResults]),
  })

  const graphqlRunner = (query, context = {}) => {
    const schema = store.getState().schema
    return graphql(schema, query, context, context, context)
  }

  // Collect layouts.
  activity = report.activityTimer(`createLayouts`)
  activity.start()
  await apiRunnerNode(`createLayouts`, {
    graphql: graphqlRunner,
    traceId: `initial-createLayouts`,
    waitForCascadingActions: true,
  })
  activity.end()

  // Collect pages.
  activity = report.activityTimer(`createPages`)
  activity.start()
  await apiRunnerNode(`createPages`, {
    graphql: graphqlRunner,
    traceId: `initial-createPages`,
    waitForCascadingActions: true,
  })
  activity.end()

  // A variant on createPages for plugins that want to
  // have full control over adding/removing pages. The normal
  // "createPages" API is called every time (during development)
  // that data changes.
  activity = report.activityTimer(`createPagesStatefully`)
  activity.start()
  await apiRunnerNode(`createPagesStatefully`, {
    graphql: graphqlRunner,
    traceId: `initial-createPagesStatefully`,
    waitForCascadingActions: true,
  })
  activity.end()

  activity = report.activityTimer(`onPreExtractQueries`)
  activity.start()
  await apiRunnerNode(`onPreExtractQueries`)
  activity.end()

  // Update Schema for SitePage.
  activity = report.activityTimer(`update schema`)
  activity.start()
  await require(`../schema`)()
  activity.end()

  // Extract queries
  activity = report.activityTimer(`extract queries from components`)
  activity.start()
  await extractQueries()
  activity.end()

  // Start the createPages hot reloader.
  if (process.env.NODE_ENV !== `production`) {
    require(`./page-hot-reloader`)(graphqlRunner)
  }

  // Run queries
  activity = report.activityTimer(`run graphql queries`)
  activity.start()
  await runQueries()
  activity.end()

  // Write out files.
  activity = report.activityTimer(`write out page data`)
  activity.start()
  try {
    await writePages()
  } catch (err) {
    report.panic(`Failed to write out page data`, err)
  }
  activity.end()

  // Write out redirects.
  activity = report.activityTimer(`write out redirect data`)
  activity.start()
  await writeRedirects()
  activity.end()

  const checkJobsDone = _.debounce(resolve => {
    const state = store.getState()
    if (state.jobs.active.length === 0) {
      report.log(``)
      report.info(`bootstrap finished - ${process.uptime()} s`)
      report.log(``)

      // onPostBootstrap
      activity = report.activityTimer(`onPostBootstrap`)
      activity.start()
      apiRunnerNode(`onPostBootstrap`).then(() => {
        activity.end()
        resolve({ graphqlRunner })
      })
    }
  }, 100)

  if (store.getState().jobs.active.length === 0) {
    // onPostBootstrap
    activity = report.activityTimer(`onPostBootstrap`)
    activity.start()
    await apiRunnerNode(`onPostBootstrap`)
    activity.end()

    report.log(``)
    report.info(`bootstrap finished - ${process.uptime()} s`)
    report.log(``)
    return { graphqlRunner }
  } else {
    return new Promise(resolve => {
      // Wait until all side effect jobs are finished.
      emitter.on(`END_JOB`, () => checkJobsDone(resolve))
    })
  }
}
Exemplo n.º 15
0
exports.sourceNodes = ({ boundActionCreators, store }) => {
  const { createNode } = boundActionCreators
  const state = store.getState()
  const { program } = state
  const { flattenedPlugins } = state

  // Add our default development page since we know it's going to
  // exist and we need a node to exist so it's query works :-)
  const page = { path: `/dev-404-page/` }
  createNode({
    ...page,
    id: createPageId(page.path),
    parent: `SOURCE`,
    children: [],
    internal: {
      type: `SitePage`,
      contentDigest: crypto
        .createHash(`md5`)
        .update(JSON.stringify(page))
        .digest(`hex`),
    },
  })

  flattenedPlugins.forEach(plugin => {
    plugin.pluginFilepath = plugin.resolve
    createNode({
      ...plugin,
      packageJson: transformPackageJson(
        require(`${plugin.resolve}/package.json`)
      ),
      id: `Plugin ${plugin.name}`,
      parent: `SOURCE`,
      children: [],
      internal: {
        contentDigest: crypto
          .createHash(`md5`)
          .update(JSON.stringify(plugin))
          .digest(`hex`),
        type: `SitePlugin`,
      },
    })
  })

  // Add site node.
  const buildTime = moment().subtract(process.uptime(), `seconds`).toJSON()

  const createGatsbyConfigNode = (config = {}) => {
    // Delete plugins from the config as we add plugins above.
    const configCopy = { ...config }
    delete configCopy.plugins
    const node = {
      siteMetadata: {
        ...configCopy.siteMetadata,
      },
      port: state.program.port,
      host: state.program.host,
      ...configCopy,
      buildTime,
    }
    createNode({
      ...node,
      id: `Site`,
      parent: `SOURCE`,
      children: [],
      internal: {
        contentDigest: crypto
          .createHash(`md5`)
          .update(JSON.stringify(node))
          .digest(`hex`),
        type: `Site`,
      },
    })
  }

  createGatsbyConfigNode(state.config)

  const pathToGatsbyConfig = systemPath.join(
    program.directory,
    `gatsby-config.js`
  )
  chokidar.watch(pathToGatsbyConfig).on(`change`, () => {
    // Delete require cache so we can reload the module.
    delete require.cache[require.resolve(pathToGatsbyConfig)]
    const config = require(pathToGatsbyConfig)
    createGatsbyConfigNode(config)
  })
}