snapper's notebooks

  • Lodash query flow - /snapper/lodash-query-flow
    Last edited 7 years ago
    const {flow, concat} = require("lodash")
    const { filter, map, pick, sortBy, take } = require('lodash/fp')

    /**
     * Pipe `data` through a list of lodash/fp transforms (data-last, so they
     * compose). A non-array input is wrapped so every transform can assume
     * it receives an array of rows.
     */
    function query(data, funcs) {
      const rows = Array.isArray(data) ? data : [data];
      const pipeline = flow(...concat([], funcs));
      return pipeline(rows);
    }

    // Fixture: one saved "editing" document with a main row plus a sub-row
    // identified by a composite key (tblTrans + TblAction).
    const store = {
      editing: {
        "saving": false,
        "etag": "W/\"MjAwNi0wNi0xNlQxNDo1NzoxOA==\"",
        "hasChanges": false,
        "values": [
          {
            "id": [{ "tblTrans": 3159 }],
            "Location": [{ "value": 994, "description": "Northern Europe - United Kingdom - Barra 1 - Area E10-40" }],
            "CaseDate": [{ "value": "2006-06-11T02:00:00" }],
            "Title": [{ "value": "This case is for statistical purposes" }],
            "Description": [{ "value": "This case is for statistical purposes" }],
            "caseNumber": [{ "value": 100 }],
            "caseType": [{ "value": 24, "description": "Quality nonconformity" }]
          },
          {
            "id": [{ "tblTrans": 3159 }, { "TblAction": 1 }],
            "ActionDeadline": [{ "value": "2006-06-25T00:00:00" }],
            "ActionUnitResponsible": [{ "value": 2, "description": "PO" }],
            "ActionCompletedDate": [{ "value": "2006-06-16T00:00:00" }],
            "ActionDescription": [{ "value": "This case is for statistical purposes" }]
          }
        ]
      }
    };

    /**
     * Select the store rows that belong to `table` (recognized by the second
     * part of their composite id) and run them through `funcs`.
     */
    function select(table, funcs) {
      const rows = store.editing.values.filter((row) => row.id[1] && row.id[1][table]);
      return query(rows, funcs);
    }

    // Demo: project TblAction rows to {id, ActionDeadline}, keep the ones due
    // on 2006-06-25, cap the result at three rows.
    select('TblAction', [
      map(pick(['id', 'ActionDeadline'])),
      filter((action) => action.ActionDeadline && action.ActionDeadline[0].value === "2006-06-25T00:00:00"),
      take(3)
    ])
  • RunKit + npm: normalizing legacy JSON - /snapper/normalizing-json
    Last edited 7 years ago - from: https://runkit.com/npm/normalizr
    // Manual "normalizr": flatten a legacy nested document (subTables all the
    // way down) into { entities: { <Table>: { <rowId>: row } }, byId: {...} },
    // leaving arrays of stringified-id references on each parent.
    let raw = {
      primaryKey: { v: 3142 },
      caseNumber: 0,
      caseType: { v: 21, d: 'HSE incident - Near miss' },
      fields: {
        Location: { values: [{ v: 553, d: 'Northern Europe - Norway - Adm. building Norway' }] },
        CaseDate: { values: [{ v: '2017-01-26T15:44:46' }] },
        Title: { values: [{}] },
        Description: { values: [{}] },
      },
      subTables: {
        TblLossPotEvaluation: [
          {
            primaryKey: { v: 1 },
            subTables: { TblFooBar: [{ primaryKey: { v: 666 }, fields: { Dust: { values: [{ v: 999 }] } } }] },
            fields: {
              PotRiskEvRecurrence: { values: [{ v: 0 }] },
              PotRiskEvLossPotentialRevNo: { values: [{ v: 1 }] },
              PotRiskEvRecurrenceFactor: { values: [{}] }
            }
          },
          {
            primaryKey: { v: 2 },
            subTables: { TblFooBar: [{ primaryKey: { v: 777 }, fields: { Dust: { values: [{ v: 888 }] } } }] },
            fields: {
              PotRiskEvRecurrence: { values: [{ v: 8 }] },
              PotRiskEvLossPotentialRevNo: { values: [{ v: 8 }] },
              PotRiskEvRecurrenceFactor: { values: [{}] }
            }
          }
        ]
      }
    };

    /**
     * Recursively lift every row of `table` into root.entities[tableName] and
     * root.byId, leaving a list of stringified-id references on `parent`.
     * Rows are mutated in place (composite id added, subTables removed);
     * `process` below hands this a deep copy so the caller's data survives.
     */
    const preProcessTableRows = (table, tableName, parentPk, parent, root) => {
      // Entity collections are keyed by stringified ids, so they must be plain
      // objects — the original used arrays here, which kept length 0 and hid
      // the rows from JSON.stringify and normal iteration.
      root.entities[tableName] = root.entities[tableName] || {};
      parent[tableName] = parent[tableName] || []; // reference ids, in order
      table.forEach((row) => {
        // create unique id from the legacy combined keys (parent pk + own pk)
        row.id = { ...parentPk, [tableName]: row.primaryKey.v };
        // recurse first so children see this row's full composite id
        Object.keys(row.subTables || {}).forEach((subTableName) =>
          preProcessTableRows(row.subTables[subTableName], subTableName, row.id, row, root)
        );
        // id must be a string for referencing, and as key for collections
        const rowId = JSON.stringify(row.id);
        parent[tableName].push(rowId);
        delete row.subTables; // noise: children are referenced by id now
        const finalRow = { ...row, ...row.fields, id: row.id };
        root.entities[tableName][rowId] = finalRow;
        root.byId[rowId] = finalRow; // flat lookup across all tables
      });
    };

    /**
     * Normalize one legacy document. Pure with respect to `org`: the input is
     * deep-copied first (the original shallow copy let the recursion corrupt
     * the caller's data, so a second call silently lost all nested tables).
     * NOTE: this module-level `process` shadows Node's global `process`.
     */
    const process = (org) => {
      const doc = structuredClone(org);
      const docid = JSON.stringify(doc.primaryKey.v);
      doc.byId = {};
      doc.entities = {};
      // Build the root entity row from the flat document only: including
      // byId/entities (as the original did) created a circular structure that
      // broke JSON.stringify, and subTables is noise once rows are referenced.
      const { subTables, byId, entities, ...docBody } = doc;
      const docRow = { ...docBody, ...doc.fields };
      doc.entities.TblTrans = { [docid]: docRow };
      doc.byId[docid] = docRow;
      // recursively flatten each sub-table, seeding rows with the parent pk
      Object.keys(doc.subTables).forEach((table) =>
        preProcessTableRows(doc.subTables[table], table, { TblTrans: doc.primaryKey.v }, doc, doc)
      );
      return doc;
    };

    // normalizr needs to know the key names beforehand, so it is a no-go here
    console.log(process(raw));
  • concat-ftw - /snapper/concat-ftw
    Last edited 7 years ago
    /**
     * Join URL path fragments onto a base URL.
     * Each fragment is stripped of slashes/backslashes and re-joined with '/'.
     * Query-like fragments (containing ?, & or =) are appended without a
     * separator. FIX: the original class /[\?|\&|\=]/ also matched a literal
     * '|' inside a path segment, wrongly gluing such segments on.
     */
    function concat(url1, ...paths) {
      const base = url1.substr(-1) === '/' ? url1 : `${url1}/`;
      return paths.reduce((prev, current) => {
        const path = current.match(/[^\/\\]+/g).join('/');
        return /[?&=]/.test(path) || /\/$/.test(prev)
          ? `${prev}${path}`
          : `${prev}/${path}`;
      }, base);
    }

    /**
     * Build a URL string from a base URL, a query object, and path fragments.
     * FIX: the original was not valid JavaScript (`const urlObject: any` is a
     * TypeScript annotation), referenced the undefined names `parse`, `format`
     * and `endpoint`, and computed an unused `endpointPath`. Rewritten on the
     * global WHATWG URL; a placeholder origin keeps relative bases working.
     */
    function resolveUrl(baseUrl, query, ...paths) {
      const PLACEHOLDER = 'http://resolve-url.invalid';
      const isAbsolute = /^[a-z][a-z0-9+.-]*:\/\//i.test(baseUrl || '');
      const urlObject = new URL(baseUrl || '/', PLACEHOLDER);
      urlObject.pathname = concat(urlObject.pathname, ...paths);
      if (query) {
        for (const [key, value] of Object.entries(query)) {
          urlObject.searchParams.set(key, String(value));
        }
      }
      const formatted = urlObject.toString();
      // strip the placeholder origin again for relative bases
      return isAbsolute ? formatted : formatted.slice(PLACEHOLDER.length);
    }

    console.log(resolveUrl('http://domain.no/', {a:1}, '/person/post', 'now'));
    console.log(concat('http://domain.com/', '/aweful/WICKED/', '/hard/', '/to/', 'join/', 'these', 'variants', '?considering=how', '&easy=it', '&is'));
  • unfluff - /snapper/article
    Last edited 7 years ago
    var endpoint = require("@runkit/runkit/json-endpoint/1.0.0")
    const parse = require('unfluff');
    const req = require('superagent');
    const cheerio = require('cheerio');

    // JSON endpoint: fetch ?url=..., extract the main article content, and
    // report timing. ?lang=... overrides the extraction language ('no').
    endpoint(module.exports, async function (request) {
      const startedAt = Date.now();

      const response = await req.get(request.query.url);
      const receiveMs = Date.now() - startedAt;

      // lazy parser: each accessor below is computed on demand
      let article = parse.lazy(response.text, request.query.lang || 'no');
      let articleText = article.text();
      const parseMs = Date.now() - startedAt;

      if (!articleText) {
        // unfluff found nothing — fall back to a CSS-selector scrape
        const $ = cheerio.load(response.text);
        articleText = $('.article-body p').text();
      }

      return {
        title: article.softTitle() || article.title(),
        description: article.description(),
        text: articleText || article.text(),
        date: article.date(),
        perf: {
          request: Date.now() - startedAt,
          receive: receiveMs,
          parse: parseMs
        }
      };
    });