Skip to content

Commit

Permalink
Backlogs CLI
Browse files Browse the repository at this point in the history
  • Loading branch information
niclake committed Jul 7, 2024
1 parent 6ae018d commit 10882fb
Show file tree
Hide file tree
Showing 7 changed files with 747 additions and 0 deletions.
48 changes: 48 additions & 0 deletions cli/backlogs/books.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
// Pulls the "Books" worksheet from a Google Spreadsheet and writes it to
// src/_data/backlogs/books.json for the site build.
import 'dotenv/config'
import { GoogleSpreadsheet } from 'google-spreadsheet';
import { JWT } from 'google-auth-library';
import path from 'path'
import { fileURLToPath } from 'url'
import fs from 'fs/promises'

// Resolve the repo root from this script's location (<root>/cli/backlogs/).
// path.resolve is cross-platform, unlike the previous string .replace('/cli/backlogs', '').
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename)
const siteRoot = path.resolve(__dirname, '..', '..')
const targetFile = path.join(siteRoot, 'src', '_data', 'backlogs', 'books.json')

// Initialize auth - see https://theoephraim.github.io/node-google-spreadsheet/#/guides/authentication
const serviceAccountAuth = new JWT({
  // env var values here are copied from service account credentials generated by google
  // see "Authentication" section in docs for more info
  email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL,
  // NOTE(review): private keys pasted into .env often need `.replace(/\\n/g, '\n')` — confirm
  key: process.env.GOOGLE_PRIVATE_KEY,
  scopes: ['https://www.googleapis.com/auth/spreadsheets'],
})

const books = new GoogleSpreadsheet(process.env.BOOKS_SHEET_ID, serviceAccountAuth)

await books.loadInfo() // loads document properties and worksheets
const booksSheet = books.sheetsByTitle['Books'] // or use `doc.sheetsById[id]` or `doc.sheetsByTitle[title]`
const allBooks = await booksSheet.getRows()

// Map each raw sheet row (positional columns 0-9) onto a named record.
const jsonArr = allBooks.map((row) => {
  const [title, authorFirst, authorLast, genre, series,
         owned, read, compDate, status, information] = row._rawData
  return { title, authorFirst, authorLast, genre, series,
           owned, read, compDate, status, information }
})

try {
  await fs.writeFile(targetFile, JSON.stringify(jsonArr))
} catch (err) {
  // Surface the failure to the shell instead of logging and exiting 0.
  console.error(err)
  process.exitCode = 1
}
47 changes: 47 additions & 0 deletions cli/backlogs/games.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
// Pulls the "Master List" worksheet from a Google Spreadsheet and writes it to
// src/_data/backlogs/games.json for the site build.
import 'dotenv/config'
import { GoogleSpreadsheet } from 'google-spreadsheet';
import { JWT } from 'google-auth-library';
import path from 'path'
import { fileURLToPath } from 'url'
import fs from 'fs/promises'

// Resolve the repo root from this script's location (<root>/cli/backlogs/).
// path.resolve is cross-platform, unlike the previous string .replace('/cli/backlogs', '').
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename)
const siteRoot = path.resolve(__dirname, '..', '..')
const targetFile = path.join(siteRoot, 'src', '_data', 'backlogs', 'games.json')

// Initialize auth - see https://theoephraim.github.io/node-google-spreadsheet/#/guides/authentication
const serviceAccountAuth = new JWT({
  // env var values here are copied from service account credentials generated by google
  // see "Authentication" section in docs for more info
  email: process.env.GOOGLE_SERVICE_ACCOUNT_EMAIL,
  // NOTE(review): private keys pasted into .env often need `.replace(/\\n/g, '\n')` — confirm
  key: process.env.GOOGLE_PRIVATE_KEY,
  scopes: ['https://www.googleapis.com/auth/spreadsheets'],
})

const games = new GoogleSpreadsheet(process.env.GAMES_SHEET_ID, serviceAccountAuth)

await games.loadInfo() // loads document properties and worksheets
const gamesSheet = games.sheetsByTitle['Master List'] // or use `doc.sheetsById[id]` or `doc.sheetsByTitle[title]`
const allGames = await gamesSheet.getRows()

// Map each raw sheet row (positional columns) onto a named record.
// Column index 3 is deliberately not exported (original code also skipped it) —
// NOTE(review): confirm that sheet column D is intentionally unused.
const jsonArr = allGames.map((row) => {
  const [title, series, seriesOrder, , system, hrEst,
         owned, status, compDate, hrComp, information] = row._rawData
  return { title, series, seriesOrder, system, hrEst,
           owned, status, compDate, hrComp, information }
})

try {
  await fs.writeFile(targetFile, JSON.stringify(jsonArr))
} catch (err) {
  // Surface the failure to the shell instead of logging and exiting 0.
  console.error(err)
  process.exitCode = 1
}
Loading

0 comments on commit 10882fb

Please sign in to comment.