exports.testPrintRead = function () { try { var path = "testPrintRead.txt"; fs.path(path).open('w').print("hello").print("world"); assert.is("hello\nworld\n", fs.path(path).open().read()); } finally { fs.remove(path); } };
// Read a file relative to this module, preferring the bundled IO helper
// and falling back to the platform "file" module when it is missing.
function read () {
    var IO = require('../../lib/jison/util/io');
    try {
        var segments = [__dirname].concat([].slice.call(arguments, 0));
        return IO.read(IO.join.apply(IO, segments));
    } catch (e) {
        var fs = require("file");
        var base = fs.path(fs.dirname(module.id));
        return base.join.apply(base, arguments).read({charset: "utf-8"});
    }
}
exports.testLittlePathOpenWriteRead = function () { try { var path = "testLittlePathOpenWriteRead.txt"; var content = "testLittlePathOpenWriteRead.txt\n"; fs.path(path).open('w').write(content).flush().close(); assert.is(content, fs.path(path).open().read()); } finally { fs.remove(path); } };
exports.testWriteReadBinaryNulls = function () { try { var path = "testWriteReadBinaryNulls.txt"; var content = "\0\0\0".toByteString("ascii"); fs.path(path).open('wb').write(content).flush().close(); assert.eq(content, fs.path(path).open('b').read()); } finally { fs.remove(path); } };
exports.testPathWriteRead = function () { try { var path = "testOpenWriteRead.txt"; var content = "testOpenWriteRead.txt\n"; fs.path(path).write(content); assert.is(content, fs.path(path).read()); } finally { fs.remove(path); } };
exports.testsRenameList = function () { try { fs.mkdir('testsRename'); fs.path('testsRename', 'A.txt').touch(); assert.eq(fs.path('testsRename').list(), ['A.txt']); fs.path('testsRename', 'A.txt').rename('B.txt'); assert.eq(fs.path('testsRename').list(), ['B.txt']); } finally { fs.rmtree('testsRename'); } };
parser.action(function (options) {
    // Translate the parsed CLI options into an update policy object.
    var policy = {
        useCache: options.useCache,
        input: options.input && FS.path(options.input),
        output: options.output && FS.path(options.output),
        useDefaultSources: options.useDefaultSources
    };
    if (options.args.length) {
        UPDATE.updatePackages(options.args, policy);
    } else {
        // No package names given: run a full update.
        UPDATE.update(policy);
    }
});
// The lexer grammar should parse to exactly the structure captured in
// the checked-in JSON fixture.
exports["test lex grammar bootstrap"] = function () {
    var fs = require("file");
    var here = fs.path(fs.dirname(module.id));
    var lexgrammar = lex.parse(
        here.join('lex', 'lex_grammar.jisonlex').read({charset: "utf-8"}));
    var expected = JSON.parse(
        here.join('lex', 'lex_grammar.lex.json').read({charset: "utf-8"}));
    assert.deepEqual(lexgrammar, expected, "grammar should be parsed correctly");
};
exports.testIsLink = function () { var here = fs.path(module.path).dirname().join("_test"); if (here.exists()) { here.rmtree(); } here.mkdirs(); try { var dir1 = here.join("dir1"); dir1.mkdirs(); var dir2 = here.join("dir2"); dir2.mkdirs(); dir2.join("file2").touch(); dir2.symlink(dir1.join("linkToDir2")); assert.isFalse(dir2.isLink()); assert.isTrue(dir1.join("linkToDir2").isLink()); dir2.join("file2").symlink(dir1.join("linkToFile1")); assert.isFalse(dir2.join("file2").isLink()); assert.isTrue(dir1.join("linkToFile1").isLink()); } finally { here.rmtree(); } }
parser.action(function (options) {
    var packages = require("packages");
    var self = this;
    var directory = tusk.getDirectory();
    var enginesDirectory = directory.join('engines');
    if (options.args.length == 0) {
        // No argument: list the installed engines.
        enginesDirectory.list().forEach(function (engineName) {
            self.print(engineName);
        });
        return;
    }
    // An engine name was given: select it by rewriting narwhal.conf.
    var engine = options.args.shift();
    if (!util.has(packages.engines, engine))
        throw new Error("No such engine " + util.enquote(engine));
    var narwhalConf = directory.join('narwhal.conf');
    var engineHome = packages.engines[engine].directory.from(
        fs.path(system.prefix).join(''));
    narwhalConf.write(
        'NARWHAL_ENGINE=' + os.enquote(engine) + "\n" +
        'NARWHAL_ENGINE_HOME=' + os.enquote(engineHome) + "\n"
    );
});
exports.getCatalogPath = function (catalogPath) { if (!catalogPath) catalogPath = TUSK.getCatalogPath(); else catalogPath = FS.path(catalogPath); return catalogPath; };
exports.build = function (location, system, packages) { if (!system) system = SYSTEM; system = Object.create(system); system.engine = "browser"; system.engines = UTIL.copy(system.engines); system.engines.unshift("browser"); location = FILE.path(location); var bundler = Bundler(system); exports.ids(system, packages).forEach(function (id) { try { var parts = id.split('/'); var basename = parts.pop(); var dirname = location.join.apply(location, parts); dirname.mkdirs(); var path = dirname.join(basename + '.js'); path.write(bundler.bundle([id])); print(path); } catch (exception) { TERM.stream.print("\0red(" + exception + "\0)"); } }); };
// Run `test` against a freshly created mock directory tree, removing
// the tree afterwards even if the test throws.
function mockDirs(test) {
    try {
        create_mock_dirs("testGlob");
        test(FILE.path("testGlob"));
    } finally {
        FILE.rmtree("testGlob");
    }
}
// The ANSI C lexical grammar fixture should parse without error.
exports["test ANSI C lexical grammar"] = function () {
    var fs = require("file");
    var grammarText = fs.path(fs.dirname(module.id))
        .join('lex', 'ansic.jisonlex')
        .read({charset: "utf-8"});
    var lexgrammar = lex.parse(grammarText);
    assert.ok(lexgrammar, "grammar should be parsed correctly");
};
exports.testCopy = function () { try { fs.path("testCopyA.txt").write("testCopy").copy("testCopyB.txt"); assert.is("testCopy\n", fs.read("testCopyB.txt")); } finally { fs.remove("testCopyA.txt"); fs.remove("testCopyB.txt"); } };
// Resolve a tech path to an absolute path when it starts with a dot,
// expanding it relative to this level's .bem directory.
// NOTE: cannot be replaced with !fs.isAbsolute().
exports.Level.prototype.resolveTechPath = function (path) {
    if (/^\./.test(path)) {
        path = fs.path(this.bemDir).join('/').resolve(path);
    }
    return path;
};
// note: need doubled \\ to properly escape in JS
// from RubySpec
// Build a directory tree of empty files under `mock_dir` covering
// dotfiles, deep nesting, and regex-special characters, for glob tests.
function create_mock_dirs(mock_dir) {
    mock_dir = FILE.path(mock_dir);
    [
        ".dotfile",
        ".dotsubdir/.dotfile",
        ".dotsubdir/nondotfile",
        "deeply/.dotfile",
        "deeply/nested/.dotfile.ext",
        "deeply/nested/directory/structure/.ext",
        "deeply/nested/directory/structure/bar",
        "deeply/nested/directory/structure/baz",
        "deeply/nested/directory/structure/file_one",
        "deeply/nested/directory/structure/file_one.ext",
        "deeply/nested/directory/structure/foo",
        "deeply/nondotfile",
        "file_one.ext",
        "file_two.ext",
        "dir_filename_ordering",
        "dir/filename_ordering",
        "nondotfile",
        "subdir_one/.dotfile",
        "subdir_one/nondotfile",
        "subdir_two/nondotfile",
        "subdir_two/nondotfile.ext",
        "special/+",
        "special/^",
        "special/$",
        "special/(",
        "special/)",
        "special/[",
        "special/]",
        "special/{",
        "special/}",
        // these three (and corresponding tests) aren't valid on Windows
        "special/*",
        "special/?",
        "special/|"
    ].forEach(function (relative) {
        var entry = mock_dir.join(relative);
        entry.dirname().mkdirs();
        entry.touch();
    });
}
// Read the "version" field from the package.json two levels up,
// preferring the bundled IO helper and falling back to "file".
function readVersion () {
    var pack;
    try {
        pack = IO.read(IO.join(__dirname, '..', '..', 'package.json'));
    } catch (e) {
        var fs = require("file");
        pack = fs.path(fs.dirname(module.id))
            .canonical()
            .join('..', 'package.json')
            .read({charset: "utf-8"});
    }
    return JSON.parse(pack).version;
}
/**
 * Loads files by convention.
 * @param usageType valid values are 'model', 'router', or 'service'
 */
function Loader(usageType) {
    // BOGART_ROOT overrides the default root (the main module's directory).
    if (sys.env["BOGART_ROOT"]) {
        this.rootPath = sys.env["BOGART_ROOT"];
    } else {
        this.rootPath = fs.path(require.main).dirname();
    }
    // Conventional location: <root>/app/<usageType>.
    this.rootPath = new fs.Path(fs.join(this.rootPath, "app", usageType));
    // Matches files named like "*_<usageType>.js".
    this.fileMatcher = new RegExp("_" + usageType + ".js$");
}
exports.main = function main (args) { var fs = require("file"); gfile = fs.path(fs.cwd()).join(args[1]), grammar = JSON.parse(gfile.read({charset: "utf-8"})); if (grammar.bnf) { var fname = fs.path(fs.cwd()).join(gfile.basename(".json") + ".jison"), stream = fname.open("w"); stream.print(json2jison(grammar)); stream.close(); } var lex = grammar.lex || grammar.rules && grammar; if (lex) { var fname = fs.path(fs.cwd()).join(gfile.basename(".json").replace(/[._]?lex$/,'') + ".jisonlex"), stream = fname.open("w"); stream.print(genLex(lex)); stream.close(); } };
exports.testMoveExists = function () { try { fs.path("testCopyA.txt").write("testCopy").move("testCopyB.txt"); assert.isFalse(fs.exists("testCopyA.txt")); assert.isTrue(fs.exists("testCopyB.txt")); } finally { if (fs.exists("testCopyA.txt")) fs.remove("testCopyA.txt"); if (fs.exists("testCopyB.txt")) fs.remove("testCopyB.txt"); } };
exports.Level = function(path) { this.bemDir = (this.path = fs.path(path).absolute()).join('.bem'); // NOTE: в директории .bem внутри уровня переопределения // может лежать модуль для уровня переопределения var level = {}; try { level = require('' + this.bemDir.join('level.js')); } catch (ignore) {} util.object.update(this, level); this.techs = this.initTechs(this.techs || {}); };
exports.main = function main (args) { //var parser = new require("args").Parser(); var fs = require("file"); gfile = fs.path(fs.cwd()).join(args[1]); // try to parse as JSON, else use BNF parser if (gfile.extension() === '.json') { var grammar = JSON.parse(gfile.read({charset: "utf-8"})); } else if (gfile.extension() === '.jison') { var grammar = require("jison/bnf").parse(gfile.read({charset: "utf-8"})); } var opt = grammar.options || {}; // lexer file if (args[2]) { var lfile = fs.path(fs.cwd()).join(args[2]); // try to parse as JSON, else use BNF parser if (lfile.extension() === '.json') { grammar.lex = JSON.parse(lfile.read({charset: "utf-8"})); } else if (lfile.extension() === '.jisonlex') { grammar.lex = require("jison/jisonlex").parse(lfile.read({charset: "utf-8"})); } } if (!opt.moduleName) opt.moduleName = gfile.basename().replace(new RegExp(gfile.extension()+"$"), ""); if (!opt.moduleType) opt.moduleType = "commonjs"; var generator = new Jison.Generator(grammar, opt); fname = fs.path(fs.cwd()).join(opt.moduleName + ".js"), source = generator.generate(opt), stream = fname.open("w"); stream.print(source); stream.close(); };
exports.testMoveExists = function () { var testString = "testCopy"; try { fs.path("testCopyA.txt").write(testString).move("testCopyB.txt"); assert.isFalse(fs.exists("testCopyA.txt")); assert.isTrue(fs.exists("testCopyB.txt")); assert.is(fs.size("testCopyB.txt"), testString.length); } finally { if (fs.exists("testCopyA.txt")) fs.remove("testCopyA.txt"); if (fs.exists("testCopyB.txt")) fs.remove("testCopyB.txt"); } };
// For every export that is still this placeholder, run `block` against
// a scratch path named after the export, removing it afterwards.
var exported = function () {
    for (var name in exports) {
        if (exports[name] !== exported)
            continue;
        try {
            var path = fs.path(fs.resolve(module.path, '.'), name);
            block(path);
        } finally {
            if (path.exists())
                path.rmtree();
        }
    }
};
// Return the shortest relative path to this tech's implementation,
// measured from `bemPath` and from every entry in require.paths.
exports.Tech.prototype.getTechRelativePath = function (bemPath) {
    var joinedBemPath = fs.path(bemPath).join('/');
    var absPath = this.getTechPath();
    // NOTE: an empty tech path means the default tech implementation
    // is in use, so the relative path must stay empty as well.
    if (absPath == '') {
        return '';
    }
    // FIXME: a better notion of the "shortest path" is needed; think
    // through the criteria for choosing it.
    var shortestPath = fs.relative(joinedBemPath, absPath);
    require.paths.forEach(function (reqPath) {
        var candidate = fs.relative(fs.path(reqPath).join('/'), absPath);
        if (candidate.length < shortestPath.length) {
            shortestPath = candidate;
        }
    });
    return shortestPath;
};
// Map an http:// or file:// URL to a path inside this store.
Store.prototype.get = function (url) {
    var parsed = URI.parse(url);
    if (parsed.scheme == "http") {
        ASSERT.ok(parsed.authority, "URI store URI's must be fully qualified");
        ASSERT.ok(parsed.root, "URI store URI's must be fully qualified");
        // Mirror the remote layout under this store's root path.
        var segments = [parsed.authority]
            .concat(parsed.directories)
            .concat([parsed.file]);
        return this.path.join.apply(this.path, segments);
    }
    if (parsed.scheme == "file") {
        // Absolute file URI's anchor at the filesystem root, others at cwd.
        var base = parsed.authorityRoot || parsed.root ? FS.path('/') : FS.cwdPath();
        return base.join.apply(base, parsed.directories.concat([parsed.file]));
    }
    ASSERT.ok(false, "URI scheme must be http or file");
};
// Run the sandboxed 'program' module for this test case, translating
// its FAIL/ERROR/DONE print protocol into assertions.
exports['test ' + testName] = function () {
    var prefix = fs.path(module.id).resolve(testName).join('');
    var done;
    var print = function (message) {
        assert.isFalse(/^FAIL/.test(message));
        if (/^ERROR/.test(message))
            throw new Error(message);
        if (/^DONE/.test(message))
            done = true;
    };
    sandbox('program', system, {
        prefix: prefix,
        loader: require.loader,
        print: print
    });
    assert.isTrue(done, 'done');
};
exports.read = function read(prefixes, catalog, usingCatalog, options) { // construct an object graph from package json files // through a breadth first search of the root package and // its transitive packages/ directories. if (!catalog) throw new Error("must pass a package data object as the second argument to packages.read."); var visitedPackages = {}; var root; prefixes = UTIL.copy(prefixes); if (typeof prefixes == 'string') prefixes = [prefixes]; // queue-based breadth-first-search of the package // tree starting with the "root" while (prefixes.length) { var queue = [FILE.path(prefixes.shift())]; while (queue.length) { var item = queue.shift(), packageDirectory, name, dependencyInfo = null; if(UTIL.isArrayLike(item)) { packageDirectory = item[0]; dependencyInfo = item[1]; name = dependencyInfo.name; } else { packageDirectory = item; name = packageDirectory.basename(); } // check for cyclic symbolic linkage var canonicalPackageDirectory = packageDirectory.canonical(); if (Object.prototype.hasOwnProperty.call(visitedPackages, canonicalPackageDirectory)) continue; visitedPackages[canonicalPackageDirectory] = true; // check for duplicate package names if (Object.prototype.hasOwnProperty.call(catalog, name)) { continue; } if (!packageDirectory.join('package.json').isFile()) { //SYSTEM.log.warn('No package.json in ' + packageDirectory); continue; } var packageDatum; try { var packageDatumJson = packageDirectory.join('package.json').read({"charset": "UTF-8"}); packageDatum = JSON.parse(packageDatumJson || '{}'); // look for local, user overrides var local = packageDirectory.join('local.json'); if (local.isFile()) { local = JSON.parse(local.read({"charset": "UTF-8"})); for (var name in local) { if (Object.prototype.hasOwnProperty.call(local, name)) { packageDatum[name] = local[name]; } } } // overlay local package file var localOverlay = packageDirectory.join('package.local.json'); if (localOverlay.isFile()) { UTIL.deepUpdate(packageDatum, 
JSON.parse(localOverlay.read().toString())); } // If package declares it is a "using" package we do not load it into the system catalog. // This feature is important as using packages do not namespace their modules in a way // that is compatible with system packages. if(UTIL.has(packageDatum, "type") && packageDatum.type=="using") { continue; } // scan the <package>/using directory for "using" packages // TODO: This should run only *once* for the SEA package as "using" packages // should only be declared in <sea>/using // To make this work we need a way to identify the SEA package // in a reliable and consistent fashion. The SEA environment variable could? exports.readUsing(options, usingCatalog, packageDirectory.join("using")); // rewrite the package name to using/<name>/package.json if it is a using package if(dependencyInfo) { name = dependencyInfo.name; } else { // set name based on package*.json "name" property name = packageDatum.name || name; } catalog[name] = packageDatum; packageDatum.directory = packageDirectory.join(''); // add this system package to the usingCatalog exports.updateUsingCatalog(options, usingCatalog, packageDirectory, name, packageDatum); // if a dependency is referring to a 'using' package ID we add the // package being referenced to the system package catalog if(packageDatum.dependencies) { let deps = packageDatum.dependencies; if (deps.constructor !== Array) { // Cannot handle dependencies with version, just use the keys here. 
deps = Object.keys(deps); } deps.forEach(function(dependency) { if(Object.prototype.hasOwnProperty.call(usingCatalog, dependency) && !Object.prototype.hasOwnProperty.call(catalog, dependency)) { queue.push([ usingCatalog[dependency].directory, { "name": dependency } ]); } }); } // normalize authors if (packageDatum.author) packageDatum.author = new exports.Author(packageDatum.author); if (!packageDatum.contributors) packageDatum.contributors = []; packageDatum.contributors = packageDatum.contributors.map(function (contributor) { return new exports.Author(contributor); }); // enqueue sub packages var packagesDirectories = packageDatum.packages; if (typeof packagesDirectories == "string") packagesDirectories = [packagesDirectories]; if (packagesDirectories === undefined) packagesDirectories = ["packages"]; packagesDirectories.forEach(function (packagesDirectory) { packagesDirectory = packageDirectory.join(packagesDirectory); if (packagesDirectory.isDirectory()) { packagesDirectory.listPaths().forEach(function (packageDirectory) { if (packageDirectory.isDirectory()) { queue.push(packageDirectory); } }); } }); // the first package we encounter gets // top-billing, the root package if (!root) root = packageDatum; } catch (exception) { SYSTEM.log.error("Could not load package '" + name + "'. " + exception); } } } return root; };
// Build the path of the file implementing this tech for a given prefix:
// "<prefix>.<techName>".
exports.Tech.prototype.fileByPrefix = function (prefix) {
    var techFileName = prefix + '.' + this.getTechName();
    return fs.path(techFileName);
};