From 96d0140e08c850dcbe3c4053714e1bab890851c7 Mon Sep 17 00:00:00 2001 From: Nick Downing Date: Mon, 8 Oct 2018 17:43:05 +1100 Subject: [PATCH] Break the main functionality into analytics, config, resources and site modules --- analytics.js | 89 ++++++++ config.js | 27 +++ ndserver.js | 559 +-------------------------------------------------- resources.js | 153 ++++++++++++++ site.js | 331 ++++++++++++++++++++++++++++++ 5 files changed, 603 insertions(+), 556 deletions(-) create mode 100644 analytics.js create mode 100644 config.js create mode 100644 resources.js create mode 100644 site.js diff --git a/analytics.js b/analytics.js new file mode 100644 index 0000000..bdec882 --- /dev/null +++ b/analytics.js @@ -0,0 +1,89 @@ +let fs = require('fs') +let util = require('util') + +let fs_writeFile = util.promisify(fs.writeFile) + +try { + fs.mkdirSync('analytics') +} +catch (err) { + if (err.code !== 'EEXIST') // should check error type + throw err +} + +let sessions +{ + let text + try { + text = fs.readFileSync('analytics/sessions.json', {encoding: 'utf-8'}) + } + catch (err) { + if (err.code !== 'ENOENT') // should check error type + throw err + text = '{}' + } + sessions = JSON.parse(text) +} +let sessions_is_dirty = false +let sessions_dirty = () => { + if (!sessions_is_dirty) { + sessions_is_dirty = true + setTimeout(sessions_write, 5000) + } +} +let sessions_write = async () => { + try { + sessions_is_dirty = false; + console.log('writing analytics/sessions.json') + await fs_writeFile( + 'analytics/sessions.json', + JSON.stringify(sessions), + {encoding: 'utf-8'} + ) + } + catch (err) { + console.error(err.stack || err.message) + } +} + +let pageviews +{ + let text + try { + text = fs.readFileSync('analytics/pageviews.json', {encoding: 'utf-8'}) + } + catch (err) { + if (err.code !== 'ENOENT') // should check error type + throw err + text = '{}' + } + pageviews = JSON.parse(text) +} +let pageviews_is_dirty = false +let pageviews_dirty = () => { + if 
(!pageviews_is_dirty) { + pageviews_is_dirty = true + setTimeout(pageviews_write, 5000) + } +} +let pageviews_write = async () => { + try { + pageviews_is_dirty = false; + console.log('writing analytics/pageviews.json') + await fs_writeFile( + 'analytics/pageviews.json', + JSON.stringify(pageviews), + {encoding: 'utf-8'} + ) + } + catch (err) { + console.error(err.stack || err.message) + } +} + +exports.sessions = sessions +exports.sessions_dirty = sessions_dirty +exports.sessions_write = sessions_write +exports.pageviews = pageviews +exports.pageviews_dirty = pageviews_dirty +exports.pageviews_write = pageviews_write diff --git a/config.js b/config.js new file mode 100644 index 0000000..a8f4368 --- /dev/null +++ b/config.js @@ -0,0 +1,27 @@ +let resources = require('./resources') + +let sites +let mime_types +let mime_type_html +let mime_type_default = 'application/octet-stream' + +let refresh = async () => { + sites = await resources.req_json('config/sites.json') + mime_types = await resources.req_json('config/mime_types.json') + mime_type_html = + mime_types.hasOwnProperty('html') ? + mime_types.html : + mime_type_default + + // a bit awkward... 
changing the exports on the fly + exports.sites = sites + exports.mime_types = mime_types + exports.mime_type_html = mime_type_html +} + +refresh() +exports.sites = undefined +exports.mime_types = undefined +exports.mime_type_html = undefined +exports.mime_type_default = mime_type_default +exports.refresh = refresh diff --git a/ndserver.js b/ndserver.js index 590287f..82eb733 100755 --- a/ndserver.js +++ b/ndserver.js @@ -1,241 +1,10 @@ #!/usr/bin/env node -let assert = require('assert') -let BuildCache = require('build_cache') let commander = require('commander') -let cookie = require('cookie') -let crypto = require('crypto') let fs = require('fs') let http = require('http') let https = require('https') -let jstemplate = require('jstemplate') -let less = require('less/lib/less-node') -let querystring = require('querystring') -var stream_buffers = require('stream-buffers') -let yauzl = require('yauzl') -let util = require('util') -let url = require('url') -let XDate = require('xdate') -let zetjs = require('zetjs') - -//let _setTimeout = util.promisify(setTimeout) -let fs_readFile = util.promisify(fs.readFile) -let fs_stat = util.promisify(fs.stat) -let fs_writeFile = util.promisify(fs.writeFile) -let yauzl_open = util.promisify(yauzl.open) - -try { - fs.mkdirSync('analytics') -} -catch (err) { - if (err.code !== 'EEXIST') // should check error type - throw err -} - -let sessions -{ - let text - try { - text = fs.readFileSync('analytics/sessions.json', {encoding: 'utf-8'}) - } - catch (err) { - if (err.code !== 'ENOENT') // should check error type - throw err - text = '{}' - } - sessions = JSON.parse(text) -} -let sessions_is_dirty = false -let sessions_dirty = () => { - if (!sessions_is_dirty) { - sessions_is_dirty = true - setTimeout(sessions_write, 5000) - } -} -let sessions_write = async () => { - try { - sessions_is_dirty = false; - console.log('writing analytics/sessions.json') - await fs_writeFile( - 'analytics/sessions.json', - JSON.stringify(sessions), - 
{encoding: 'utf-8'} - ) - } - catch (err) { - console.error(err.stack || err.message) - } -} - -let pageviews -{ - let text - try { - text = fs.readFileSync('analytics/pageviews.json', {encoding: 'utf-8'}) - } - catch (err) { - if (err.code !== 'ENOENT') // should check error type - throw err - text = '{}' - } - pageviews = JSON.parse(text) -} -let pageviews_is_dirty = false -let pageviews_dirty = () => { - if (!pageviews_is_dirty) { - pageviews_is_dirty = true - setTimeout(pageviews_write, 5000) - } -} -let pageviews_write = async () => { - try { - pageviews_is_dirty = false; - console.log('writing analytics/pageviews.json') - await fs_writeFile( - 'analytics/pageviews.json', - JSON.stringify(pageviews), - {encoding: 'utf-8'} - ) - } - catch (err) { - console.error(err.stack || err.message) - } -} - -let build_cache_js = new BuildCache() -let req_js = async path => { - let result = await build_cache_js.get(path) - if (result === undefined) { - console.log('compiling', path) - result = await jstemplate(path) - build_cache_js.set(path, result) - } - return result -} - -let build_cache_json = new BuildCache() -let req_json = async path => { - let result = await build_cache_json.get(path) - if (result === undefined) { - console.log('parsing', path) - result = JSON.parse(await fs_readFile(path)) - build_cache_json.set(path, result) - } - return result -} - -let build_cache_less = new BuildCache() -let req_less = async (path, site_root, dir_name) => { - let result = await build_cache_less.get(path) - if (result === undefined) { - console.log('compiling', path) - let render = await less.render( - await fs_readFile(path, {encoding: 'utf-8'}), - { - //color: true, - //compress: false, - //depends: false, - filename: path, - //globalVars: null, - //ieCompat: false, - //insecure: false, - //javascriptEnabled: false, - //lint: false, - //math: 0, - //modifyVars: null, - paths: [site_root + dir_name], - //plugins: [], - //reUsePluginManager: true, - //rewriteUrls: false, - 
rootpath: site_root//, - //strictImports: false, - //strictUnits: false, - //urlArgs: '' - } - ) - result = new Buffer(render.css, 'utf-8') - build_cache_less.set(path, result, render.imports) - } - return result -} - -let build_cache_text = new BuildCache() -let req_text = async path => { - let result = await build_cache_text.get(path) - if (result === undefined) { - console.log('reading', path) - result = await fs_readFile(path, {encoding: 'utf-8'}) - build_cache_text.set(path, result) - } - return result -} - -let build_cache_zip = new BuildCache() -let req_zip = async path => { - let result = await build_cache_zip.get(path) - if (result === undefined) { - console.log('decompressing', path) - result = {} - let zipfile = await yauzl_open(path, {autoClose: false}) - let entries = [] - await new Promise( - (resolve, reject) => { - zipfile. - on('entry', entry => {entries.push(entry)}). - on('end', () => resolve()) - } - ) - for (let i = 0; i < entries.length; ++i) { - let read_stream = await new Promise( - (resolve, reject) => { - zipfile.openReadStream( - entries[i], - (err, stream) => { - if (err) - reject(err) - resolve(stream) - } - ) - } - ) - let write_stream = new stream_buffers.WritableStreamBuffer() - let data = new Promise( - (resolve, reject) => { - write_stream. - on('finish', () => {resolve(write_stream.getContents())}). 
- on('error', () => {reject()}) - } - ) - read_stream.pipe(write_stream) - data = await data - console.log('entry path', entries[i].fileName, 'size', data.length) - result[entries[i].fileName] = data - } - await zipfile.close() - build_cache_zip.set(path, result) - } - return result -} - -let build_cache_zet = new BuildCache() -let req_zet = async path => { - let result = await build_cache_zet.get(path) - if (result === undefined) { - console.log('opening', path) - result = new zetjs.Index(path) - build_cache_zet.set( - path, - result, - [ - path + '.map.0', - path + '.param.0', - path + '.v.0', - path + '.vocab.0' - ] - ) - } - return result -} +let site = require('./site') commander.version('1.0.0').option( '-c, --enable-caching', @@ -258,331 +27,9 @@ commander.version('1.0.0').option( 8443 ).parse(process.argv) -let sites //= JSON.parse(fs.readFileSync('config/sites.json')) -let mime_types //= JSON.parse(fs.readFileSync('config/mime_types.json')) -let mime_type_default = 'application/octet-stream' - -let serve = (res, status, mime_type, data) => { - res.statusCode = status - // html files will be direct recipient of links/bookmarks so can't have - // a long lifetime, other files like css or images are often large files - // and won't change frequently (but we'll need cache busting eventually) - if ( - commander.enableCaching && - (!mime_types.hasOwnProperty('html') || mime_type !== mime_types['html']) - ) - res.setHeader('Cache-Control', 'max-age=3600') - res.setHeader('Content-Type', mime_type) - res.setHeader('Content-Length', data.length) - res.end(data) -} - -let die = res => { - let body = 'Page not found' - serve(res, 404, 'text/html; charset=utf-8', new Buffer(body, 'utf8')) -} - -let redirect = (res, location) => { - res.statusCode = 301 - res.setHeader('Location', location) - res.end('Redirecting to ' + location) -} - -let app = async (req, res, protocol) => { - // refresh global configuration variables - sites = await req_json('config/sites.json') - 
mime_types = await req_json('config/mime_types.json') - - let site = req.headers.host || 'localhost' - let temp = site.indexOf(':') - let port_suffix = temp === -1 ? '' : site.substring(temp) - site = site.substring(0, site.length - port_suffix.length) - if (!sites.hasOwnProperty(site)) { - console.log('nonexistent site', site) - die(res) - return - } - temp = sites[site] - let site_root - if (temp.type === 'redirect') { - let site_domain = temp.domain - console.log('redirecting', site, 'to', site_domain) - redirect(res, protocol + '://' + site_domain + port_suffix + req.url) - return - } - else if (temp.type === 'site') - site_root = temp.root - else - assert(false) - - // parse the pathname portion of url - // this is actually cheating since it's not a complete url - let parsed_url = url.parse(req.url, true) - let path = parsed_url.pathname.split('/') - - // path must begin with / - if (path.length === 0 || path[0].length) - return die(res) - - // path elements must be findable in the file system (thus can't be empty) - let dir_name = '' - let dir_name_is_pub = false - for (let i = 1; i < path.length - 1; ++i) { - dir_name += '/' + path[i] - if (path[i].length === 0 || path[i].charAt(0) === '.') { - console.log(site, 'bad path component', dir_name) - return die(res) - } - let stats - try { - stats = await fs_stat(site_root + dir_name) - } - catch (err) { - if (err.code !== 'ENOENT') - throw err - if (!dir_name_is_pub) { - temp = dir_name + '.pub' - try { - stats = await fs_stat(site_root + temp) - dir_name = temp - dir_name_is_pub = true - } - catch (err2) { - if (err2.code !== 'ENOENT') - throw err2 - console.log(site, 'directory not found', dir_name) - return die(res) - } - } - if (!stats.isDirectory()) { - console.log(site, 'not directory', dir_name) - return die(res) - } - } - } - - file_name = path[path.length - 1] - if (file_name === '') { - path[path.length - 1] = 'index.html' - path = path.join('/') - console.log(site, 'redirecting', parsed_url.pathname, 
'to', path) - redirect(res, path + (parsed_url.search || '')) - return - } - let page = path.slice(1).join('/') - - temp = file_name.lastIndexOf('.') - let file_type = temp === -1 ? '' : file_name.substring(temp + 1) - let mime_type = - mime_types.hasOwnProperty(file_type) ? - mime_types[file_type] : - mime_type_default - - if (file_type == 'html') { - if (!sessions.hasOwnProperty(site)) - sessions[site] = {} - let site_sessions = sessions[site] - let cookies = cookie.parse(req.headers.cookie || ''), session_key - if ( - !cookies.hasOwnProperty('session_key') || - !site_sessions.hasOwnProperty(session_key = cookies.session_key) - ) { - session_key = crypto.randomBytes(16).toString('hex') - site_sessions[session_key] = {} - } - let session = site_sessions[session_key] - - let expires = new XDate() - expires.addMonths(1) - expires = expires.toUTCString() - res.setHeader( - 'Set-Cookie', - 'session_key=' + session_key + '; expires=' + expires + '; path=/;' - ) - session.expires = expires - - if (!pageviews.hasOwnProperty(site)) - pageviews[site] = {} - let site_pageviews = pageviews[site] - if (!site_pageviews.hasOwnProperty(page)) - site_pageviews[page] = {visits: 0, unique_visits: 0} - let pageview = site_pageviews[page] - ++pageview.visits; - - - if (!session.hasOwnProperty('pageviews')) - session.pageviews = {} - let session_pageviews = session.pageviews - if (!session_pageviews.hasOwnProperty(page)) { - session_pageviews[page] = 0 - ++pageview.unique_visits - } - ++session_pageviews[page] - - sessions_dirty() - pageviews_dirty() - } - - /*let*/ page = dir_name + '/' + file_name; let data - if (dir_name_is_pub) { - try { - let data = await fs_readFile(site_root + page) - console.log( - site, - 'serving', - page, - 'length', - data.length, - 'from pub' - ) - serve(res, 200, mime_type, data) - return - } - catch (err) { - if (err.code !== 'ENOENT') - throw err - } - } - else { - temp = page + '.pub' - try { - let data = await fs_readFile(site_root + temp) - 
console.log( - site, - 'serving', - temp, - 'length', - data.length, - 'from pub' - ) - serve(res, 200, mime_type, data) - return - } - catch (err) { - if (err.code !== 'ENOENT') - throw err - } - - switch (file_type) { - case 'html': - temp = page + '.js' - try { - let buffers = [] - let env = { - lang: 'en', - page: page, - query: parsed_url.query, - site: site, - site_root: site_root - } - let out = str => {buffers.push(Buffer.from(str))} - let req = async (str, type) => { - let path = ( - str.length > 0 && str.charAt(0) === '/' ? - site_root : - site_root + dir_name + '/' - ) + str, result - switch (type) { - case undefined: - result = await (await req_js(path))(env, out, req) - break - case 'js': - result = await req_js(path) - break - case 'json': - result = await req_json(path) - break - case 'text': - result = await req_text(path) - break - case 'zet': - result = await req_zet(path) - break - default: - assert(false) - } - return result - } - await req(temp) - let data = Buffer.concat(buffers) - console.log( - site, - 'serving', - temp, - 'length', - data.length, - 'from js' - ) - serve(res, 200, mime_type, data) - return - } - catch (err) { - if (err.code !== 'ENOENT') // should check error type - throw err - } - break - - case 'css': - temp = page + '.less' - try { - let data = await req_less(site_root + temp, site_root, dir_name) - console.log( - site, - 'serving', - temp, - 'length', - data.length, - 'from less' - ) - serve(res, 200, mime_type, data) - return - } - catch (err) { - if (err.code !== 'ENOENT') // note: err.code might be undefined - throw err - } - break - } - } - - let favicons = await req_zip(site_root + '/favicons.zip') - temp = page.substring(1) // fix this to avoid leading / on all absolute paths - if (favicons.hasOwnProperty(temp)) { - let data = favicons[temp] - console.log( - site, - 'serving', - page, - 'length', - data.length, - 'from favicons' - ) - serve(res, 200, mime_type, data) - return - } - - console.log(site, 'file not 
found', page) - return die(res) -} - -let tryApp = (req, res, protocol) => { - app(req, res, protocol).catch( - err => { - console.error(err.stack || err.message) - let body = - '
<pre>' +
-        (err.stack || err.message) +
-        '</pre>
' - serve(res, 500, 'text/html; charset=utf-8', new Buffer(body, 'utf8')) - } - ) - // note: the promise is forgotten about here, so each incoming request - // proceeds in an unsupervised fashion to eventual completion or error -} - if (commander.httpPort !== -1) { http.createServer( - (req, res) => tryApp(req, res, 'http') + (req, res) => site.tryApp(req, res, 'http') // ignore returned promise ).listen(commander.httpPort) console.log('HTTP server listening on port', commander.httpPort) } @@ -592,7 +39,7 @@ if (commander.httpsPort !== -1) { 'cert': fs.readFileSync(commander.sslCert), 'key': fs.readFileSync(commander.sslKey) }, - (req, res) => tryApp(req, res, 'https') + (req, res) => site.tryApp(req, res, 'https') // ignore returned promise ).listen(commander.httpsPort) console.log('HTTPS server listening on port', commander.httpsPort) } diff --git a/resources.js b/resources.js new file mode 100644 index 0000000..72461e5 --- /dev/null +++ b/resources.js @@ -0,0 +1,153 @@ +let BuildCache = require('build_cache') +let fs = require('fs') +let jstemplate = require('jstemplate') +let less = require('less/lib/less-node') +var stream_buffers = require('stream-buffers') +let util = require('util') +let yauzl = require('yauzl') +let zetjs = require('zetjs') + +let fs_readFile = util.promisify(fs.readFile) +let yauzl_open = util.promisify(yauzl.open) + +let build_cache_js = new BuildCache() +let req_js = async path => { + let result = await build_cache_js.get(path) + if (result === undefined) { + console.log('compiling', path) + result = await jstemplate(path) + build_cache_js.set(path, result) + } + return result +} + +let build_cache_json = new BuildCache() +let req_json = async path => { + let result = await build_cache_json.get(path) + if (result === undefined) { + console.log('parsing', path) + result = JSON.parse(await fs_readFile(path)) + build_cache_json.set(path, result) + } + return result +} + +let build_cache_less = new BuildCache() +let req_less = async (path, 
site_root, dir_name) => { + let result = await build_cache_less.get(path) + if (result === undefined) { + console.log('compiling', path) + let render = await less.render( + await fs_readFile(path, {encoding: 'utf-8'}), + { + //color: true, + //compress: false, + //depends: false, + filename: path, + //globalVars: null, + //ieCompat: false, + //insecure: false, + //javascriptEnabled: false, + //lint: false, + //math: 0, + //modifyVars: null, + paths: [site_root + dir_name], + //plugins: [], + //reUsePluginManager: true, + //rewriteUrls: false, + rootpath: site_root//, + //strictImports: false, + //strictUnits: false, + //urlArgs: '' + } + ) + result = new Buffer(render.css, 'utf-8') + build_cache_less.set(path, result, render.imports) + } + return result +} + +let build_cache_text = new BuildCache() +let req_text = async path => { + let result = await build_cache_text.get(path) + if (result === undefined) { + console.log('reading', path) + result = await fs_readFile(path, {encoding: 'utf-8'}) + build_cache_text.set(path, result) + } + return result +} + +let build_cache_zip = new BuildCache() +let req_zip = async path => { + let result = await build_cache_zip.get(path) + if (result === undefined) { + console.log('decompressing', path) + result = {} + let zipfile = await yauzl_open(path, {autoClose: false}) + let entries = [] + await new Promise( + (resolve, reject) => { + zipfile. + on('entry', entry => {entries.push(entry)}). + on('end', () => resolve()) + } + ) + for (let i = 0; i < entries.length; ++i) { + let read_stream = await new Promise( + (resolve, reject) => { + zipfile.openReadStream( + entries[i], + (err, stream) => { + if (err) + reject(err) + resolve(stream) + } + ) + } + ) + let write_stream = new stream_buffers.WritableStreamBuffer() + let data = new Promise( + (resolve, reject) => { + write_stream. + on('finish', () => {resolve(write_stream.getContents())}). 
+ on('error', () => {reject()}) + } + ) + read_stream.pipe(write_stream) + data = await data + console.log('entry path', entries[i].fileName, 'size', data.length) + result[entries[i].fileName] = data + } + await zipfile.close() + build_cache_zip.set(path, result) + } + return result +} + +let build_cache_zet = new BuildCache() +let req_zet = async path => { + let result = await build_cache_zet.get(path) + if (result === undefined) { + console.log('opening', path) + result = new zetjs.Index(path) + build_cache_zet.set( + path, + result, + [ + path + '.map.0', + path + '.param.0', + path + '.v.0', + path + '.vocab.0' + ] + ) + } + return result +} + +exports.req_js = req_js +exports.req_json = req_json +exports.req_less = req_less +exports.req_text = req_text +exports.req_zip = req_zip +exports.req_zet = req_zet diff --git a/site.js b/site.js new file mode 100644 index 0000000..8db0246 --- /dev/null +++ b/site.js @@ -0,0 +1,331 @@ +let analytics = require('./analytics') +let assert = require('assert') +let cookie = require('cookie') +let crypto = require('crypto') +let fs = require('fs') +let config = require('./config') +let resources = require('./resources') +let util = require('util') +let url = require('url') +let XDate = require('xdate') + +let fs_readFile = util.promisify(fs.readFile) +let fs_stat = util.promisify(fs.stat) + +let serve = (res, status, mime_type, data) => { + res.statusCode = status + // html files will be direct recipient of links/bookmarks so can't have + // a long lifetime, other files like css or images are often large files + // and won't change frequently (but we'll need cache busting eventually) + if ( + false && //commander.enableCaching && + mime_type !== config.mime_types_html + ) + res.setHeader('Cache-Control', 'max-age=3600') + res.setHeader('Content-Type', mime_type) + res.setHeader('Content-Length', data.length) + res.end(data) +} + +let die = res => { + let body = 'Page not found' + serve(res, 404, config.mime_type_html, new 
Buffer(body, 'utf8')) +} + +let redirect = (res, location) => { + res.statusCode = 301 + res.setHeader('Location', location) + res.end('Redirecting to ' + location) +} + +let app = async (req, res, protocol) => { + let site = req.headers.host || 'localhost' + let temp = site.indexOf(':') + let port_suffix = temp === -1 ? '' : site.substring(temp) + site = site.substring(0, site.length - port_suffix.length) + if (!config.sites.hasOwnProperty(site)) { + console.log('nonexistent site', site) + die(res) + return + } + temp = config.sites[site] + let site_root + if (temp.type === 'redirect') { + let site_domain = temp.domain + console.log('redirecting', site, 'to', site_domain) + redirect(res, protocol + '://' + site_domain + port_suffix + req.url) + return + } + else if (temp.type === 'site') + site_root = temp.root + else + assert(false) + + // parse the pathname portion of url + // this is actually cheating since it's not a complete url + let parsed_url = url.parse(req.url, true) + let path = parsed_url.pathname.split('/') + + // path must begin with / + if (path.length === 0 || path[0].length) + return die(res) + + // path elements must be findable in the file system (thus can't be empty) + let dir_name = '' + let dir_name_is_pub = false + for (let i = 1; i < path.length - 1; ++i) { + dir_name += '/' + path[i] + if (path[i].length === 0 || path[i].charAt(0) === '.') { + console.log(site, 'bad path component', dir_name) + return die(res) + } + let stats + try { + stats = await fs_stat(site_root + dir_name) + } + catch (err) { + if (err.code !== 'ENOENT') + throw err + if (!dir_name_is_pub) { + temp = dir_name + '.pub' + try { + stats = await fs_stat(site_root + temp) + dir_name = temp + dir_name_is_pub = true + } + catch (err2) { + if (err2.code !== 'ENOENT') + throw err2 + console.log(site, 'directory not found', dir_name) + return die(res) + } + } + if (!stats.isDirectory()) { + console.log(site, 'not directory', dir_name) + return die(res) + } + } + } + + 
file_name = path[path.length - 1] + if (file_name === '') { + path[path.length - 1] = 'index.html' + path = path.join('/') + console.log(site, 'redirecting', parsed_url.pathname, 'to', path) + redirect(res, path + (parsed_url.search || '')) + return + } + let page = path.slice(1).join('/') + + temp = file_name.lastIndexOf('.') + let file_type = temp === -1 ? '' : file_name.substring(temp + 1) + let mime_type = + config.mime_types.hasOwnProperty(file_type) ? + config.mime_types[file_type] : + config.mime_type_default + + if (file_type == 'html') { + if (!analytics.sessions.hasOwnProperty(site)) + analytics.sessions[site] = {} + let site_sessions = analytics.sessions[site] + let cookies = cookie.parse(req.headers.cookie || ''), session_key + if ( + !cookies.hasOwnProperty('session_key') || + !site_sessions.hasOwnProperty(session_key = cookies.session_key) + ) { + session_key = crypto.randomBytes(16).toString('hex') + site_sessions[session_key] = {} + } + let session = site_sessions[session_key] + + let expires = new XDate() + expires.addMonths(1) + expires = expires.toUTCString() + res.setHeader( + 'Set-Cookie', + 'session_key=' + session_key + '; expires=' + expires + '; path=/;' + ) + session.expires = expires + + if (!analytics.pageviews.hasOwnProperty(site)) + analytics.pageviews[site] = {} + let site_pageviews = analytics.pageviews[site] + if (!site_pageviews.hasOwnProperty(page)) + site_pageviews[page] = {visits: 0, unique_visits: 0} + let pageview = site_pageviews[page] + ++pageview.visits; + + + if (!session.hasOwnProperty('analytics.pageviews')) + session.pageviews = {} + let session_pageviews = session.pageviews + if (!session_pageviews.hasOwnProperty(page)) { + session_pageviews[page] = 0 + ++pageview.unique_visits + } + ++session_pageviews[page] + + analytics.sessions_dirty() + analytics.pageviews_dirty() + } + + /*let*/ page = dir_name + '/' + file_name; let data + if (dir_name_is_pub) { + try { + let data = await fs_readFile(site_root + page) + 
console.log( + site, + 'serving', + page, + 'length', + data.length, + 'from pub' + ) + serve(res, 200, mime_type, data) + return + } + catch (err) { + if (err.code !== 'ENOENT') + throw err + } + } + else { + temp = page + '.pub' + try { + let data = await fs_readFile(site_root + temp) + console.log( + site, + 'serving', + temp, + 'length', + data.length, + 'from pub' + ) + serve(res, 200, mime_type, data) + return + } + catch (err) { + if (err.code !== 'ENOENT') + throw err + } + + switch (file_type) { + case 'html': + temp = page + '.js' + try { + let buffers = [] + let env = { + lang: 'en', + page: page, + query: parsed_url.query, + site: site, + site_root: site_root + } + let out = str => {buffers.push(Buffer.from(str))} + let req = async (str, type) => { + let path = ( + str.length > 0 && str.charAt(0) === '/' ? + site_root : + site_root + dir_name + '/' + ) + str, result + switch (type) { + case undefined: + result = await (await resources.req_js(path))(env, out, req) + break + case 'js': + result = await resources.req_js(path) + break + case 'json': + result = await resources.req_json(path) + break + case 'text': + result = await resources.req_text(path) + break + case 'zet': + result = await resources.req_zet(path) + break + default: + assert(false) + } + return result + } + await req(temp) + let data = Buffer.concat(buffers) + console.log( + site, + 'serving', + temp, + 'length', + data.length, + 'from js' + ) + serve(res, 200, mime_type, data) + return + } + catch (err) { + if (err.code !== 'ENOENT') // should check error type + throw err + } + break + + case 'css': + temp = page + '.less' + try { + let data = await resources.req_less(site_root + temp, site_root, dir_name) + console.log( + site, + 'serving', + temp, + 'length', + data.length, + 'from less' + ) + serve(res, 200, mime_type, data) + return + } + catch (err) { + if (err.code !== 'ENOENT') // note: err.code might be undefined + throw err + } + break + } + } + + let favicons = await 
resources.req_zip(site_root + '/favicons.zip') + temp = page.substring(1) // fix this to avoid leading / on all absolute paths + if (favicons.hasOwnProperty(temp)) { + let data = favicons[temp] + console.log( + site, + 'serving', + page, + 'length', + data.length, + 'from favicons' + ) + serve(res, 200, mime_type, data) + return + } + + console.log(site, 'file not found', page) + return die(res) +} + +let tryApp = async (req, res, protocol) => { + await config.refresh() + try { + await app(req, res, protocol) + } + catch (err) { + let message = (err.stack || err.message).toString() + console.error(message) + let body = '
<pre>' + message + '</pre>
' + serve(res, 500, config.mime_type_html, new Buffer(body, 'utf8')) + } +} + +exports.serve = serve +exports.die = die +exports.redirect = redirect +exports.app = app +exports.tryApp = tryApp -- 2.34.1