Initial commit

This commit is contained in:
simon987 2019-04-07 11:25:50 -04:00
commit 797fae9ec1
32 changed files with 27663 additions and 0 deletions

128
.gitignore vendored Normal file
View File

@ -0,0 +1,128 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# dependencies
node_modules/
# testing
/coverage
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
yarn.lock
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.idea/
in/
repo/
tmp/
workspace/
worker.json

6
.gitmodules vendored Normal file
View File

@ -0,0 +1,6 @@
[submodule "task_tracker_drone"]
path = task_tracker_drone
url = https://github.com/simon987/task_tracker_drone/
[submodule "last.fm"]
path = last.fm
url = https://git.simon987.net/drone/last.fm

0
__init__.py Normal file
View File

355
convert_mb.py Normal file
View File

@ -0,0 +1,355 @@
# convert_mb.py — convert MusicBrainz dump TSV tables (in/) into CSV files
# (repo/) formatted for Neo4j's bulk importer (see import.sh: headers use the
# :ID / :START_ID / :END_ID / :LABEL / :TYPE conventions).
import os

# Raw dump rows keyed by their numeric id (first TSV column).
links = dict()
link_types = dict()
areas = dict()
labels = dict()
# label-type id -> extra ";Label" suffix; "\N" is the dump's NULL marker.
label_types = {
    "\\N": ""
}
release_groups = dict()
# release id -> release-group id; releases are collapsed onto their group.
release_to_release_group_map = dict()
release_types = {
    "\\N": "",
}
artists = dict()
tags = dict()

# MusicBrainz link-type name -> Neo4j relationship :TYPE.
# An empty value means that relationship kind is deliberately dropped.
release_release_rel_map = {
    "covers and versions": "",
    "remixes and compilations": "",
    "DJ-mix": "IS_DJ_MIX_OF",
    "live performance": "IS_LIVE_PERFORMANCE_OF",
    "cover": "IS_COVER_OF",
    "remix": "IS_REMIX_OF",
    "mashes up": "IS_MASHUP_OF",
    "included in": "INCLUDED_IN",
    "single from": "IS_SINGLE_FROM"
}
artist_release_rel_map = {
    "translator": "TRANSLATED",
    "liner notes": "WROTE_LINER_NOTES",
    "lyricist": "IS_LYRICIST_FOR",
    "lacquer cut": "DID_LACQUER_CUT_FOR",
    "samples from artist": "HAS_SAMPLES_IN",
    "remixes and compilations": "",
    "composition": "COMPOSED",
    "booking": "DID_BOOKING_FOR",
    "balance": "DID_BALANCE_FOR",
    "misc": "HAS_MISC_ROLE_IN",
    "conductor": "CONDUCTED",
    "legal representation": "PROVIDED_LEGAL_REPRESENTATION_FOR",
    "design/illustration": "DID_DESIGN_FOR",
    "performing orchestra": "PERFORMED_FOR",
    "producer": "PRODUCED",
    "instrument": "PERFORMED_INSTRUMENT_FOR",
    "writer": "WROTE_LYRICS_FOR",
    "production": "DID_PRODUCTION_FOR",
    "performance": "PERFORMED_FOR",
    "composer": "IS_COMPOSER_FOR",
    "sound": "DID_SOUND_FOR",
    "remixer": "DID_REMIXING_FOR",
    "orchestrator": "IS_ORCHESTRATOR_FOR",
    "compiler": "DID_COMPILATION_FOR",
    "vocal arranger": "IS_ARRANGER_FOR",
    # NOTE(review): "IS_ARRENGER_FOR" is likely a typo of IS_ARRANGER_FOR
    # (cf. "vocal arranger" / "instrument arranger"); fixing it would change
    # the imported relationship type, so it is only flagged here.
    "arranger": "IS_ARRENGER_FOR",
    "mix-DJ": "MIXED",
    "editor": "IS_EDITOR_FOR",
    "illustration": "DID_ILLUSTRATION_FOR",
    "audio": "DID_AUDIO_FOR",
    "publishing": "IS_PUBLISHER_FOR",
    "art direction": "DID_ART_DIRECTOR_FOR",
    "design": "DID_DESIGN_FOR",
    "instrument arranger": "IS_ARRANGER_FOR",
    "chorus master": "IS_CHORUS_MASTER_FOR",
    "photography": "DID_PHOTOGRAPHY_FOR",
    "performer": "PERFORMED_IN",
    "graphic design": "DID_GRAPHIC_DESIGN_FOR",
    "booklet editor": "IS_BOOKLET_EDITOR_FOR",
    # NOTE(review): "PROGRAMING" / "TECNICIAN" / "PERFORED" below look like
    # spelling mistakes; left as-is to avoid changing imported data.
    "programming": "DID_PROGRAMING_FOR",
    "copyright": "IS_COPYRIGHT_HOLDER_OF",
    "piano technician": "IS_PIANO_TECNICIAN_FOR",
    "phonographic copyright": "IS_PHONOGRAPHIC_COPYRIGHT_HOLDER_OF",
    "mastering": "DID_MASTERING_FOR",
    "vocal": "PERFORED_VOCALS_FOR",
    "librettist": "IS_LIBRETTIST_FOR",
    "mix": "MIXED",
    "recording": "DID_RECORDING_FOR",
    "concertmaster": "IS_CONCERTMASTER_FOR",
    "engineer": "IS_ENGINEER_FOR",
    # release_group
    "tribute": "IS_TRIBUTE_TO",
    "dedicated to": "IS_DEDICATED_TO",
    "creative direction": "",
    "artists and repertoire": ""
}
artist_artist_rel_map = {
    "teacher": "TEACHER_OF",
    "composer-in-residence": "HAS_COMPOSER-IN-RESIDENCE_STATUS_IN",
    "member of band": "IS_MEMBER_OF",
    "voice actor": "IS_VOICE_ACTOR_OF",
    "tribute": "IS_TRIBUTE_TO",
    "supporting musician": "IS_SUPPORTING_MUSICIAN_OF",
    "instrumental supporting musician": "IS_INSTRUMENTAL_SUPPORTING_MUSICIAN_OF",
    "personal relationship": "HAS_PERSONAL_RELATIONSHIP_WITH",
    "musical relationships": "HAS_MUSICAL_RELATIONSHIP_WITH",
    "collaboration": "HAS_COLLABORATED_WITH",
    "married": "IS_MARRIED_WITH",
    "sibling": "IS_SIBLING_OF",
    "parent": "IS_PARENT_OF",
    "is person": "IS",
    "conductor position": "IS_CONDUCTOR_OF",
    "vocal supporting musician": "DOES_VOCAL_SUPPORT_FOR",
    "artistic director": "IS_ARTIST_DIRECTOR_OF",
    "subgroup": "IS_SUBGROUP_OF",
    "founder": "IS_FOUNDER_OF",
    "involved with": "IS_INVOLVED_WITH",
}
label_label_rel_map = {
    "label rename": "WAS_RENAMED_TO",
    "imprint": "DOES_IMPRINT_FOR",
    "label distribution": "DOES_DISTRIBUTION_FOR",
    "business association": "HAS_BUSINESS_ASSOCIATION_TO",
    "label ownership": "OWNS",
    "label reissue": "DOES_REISSUING_FOR"
}
# Fresh output directories: repo/ holds the final CSVs, tmp/ intermediates.
if not os.path.exists("repo"):
    os.mkdir("repo")
else:
    os.system("rm repo/*")
if not os.path.exists("tmp"):
    os.mkdir("tmp")
else:
    os.system("rm tmp/*")

# Load the lookup tables needed to resolve relationship rows later.
with open("in/link", "r") as f:
    for line in f:
        cols = line.split("\t")
        links[cols[0]] = cols
with open("in/link_type", "r") as f:
    for line in f:
        cols = line.split("\t")
        link_types[cols[0]] = cols
with open("in/area", "r") as f:
    for line in f:
        cols = line.split("\t")
        areas[cols[0]] = cols
with open("in/label_type") as f:
    for line in f:
        cols = line.split("\t")
        # Type name becomes an extra ";Name" Neo4j label, spaces removed.
        label_types[cols[0]] = ";" + cols[1].replace(" ", "")
        # NOTE(review): the condition tests cols[3] but appends cols[2]'s
        # label — confirm which column is the parent-type id in this dump.
        if cols[3] != "\\N" and cols[2] in label_types:
            label_types[cols[0]] += label_types[cols[2]].replace(" ", "")
with open("in/artist") as f:
    for line in f:
        cols = line.split("\t")
        artists[cols[0]] = cols

# Area containment hierarchy and area nodes.
with open("repo/area_area.csv", "w") as out:
    out.write(":START_ID(Area),:END_ID(Area)\n")
    with open("in/l_area_area", "r") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((areas[cols[3]][1],
                                areas[cols[2]][1]
                                )) + "\n")
with open("repo/area.csv", "w") as out:
    out.write("id:ID(Area),name\n")
    for k, area in areas.items():
        out.write(",".join((area[1],
                            '"' + area[2] + '"'
                            )) + "\n")
# ------
# Artist nodes (bands get an extra ;Group label) and artist->area edges.
out_artist = open("repo/artist.csv", "w")
out_artist_area = open("repo/artist_area.csv", "w")
out_artist.write("id:ID(Artist),name,year:int,:LABEL\n")
out_artist_area.write(":START_ID(Artist),:END_ID(Area)\n")
for _, artist in artists.items():
    out_artist.write(",".join((
        artist[1],
        '"' + artist[2].replace("\"", "\"\"") + '"',
        artist[4] if artist[4] != "\\N" else "",  # begin year, "" when NULL
        "Artist" + (";Group\n" if artist[10] == "2" else "\n")
    )))
    if artist[11] != "\\N":
        out_artist_area.write(artist[1] + "," + areas[artist[11]][1] + "\n")
out_artist.close()
out_artist_area.close()

# Artist<->artist relationships; :TYPE resolved through link -> link_type.
with open("repo/artist_artist.csv", "w") as out:
    out.write(":START_ID(Artist),:END_ID(Artist),:TYPE\n")
    with open("in/l_artist_artist", "r") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((
                artists[cols[2]][1],
                artists[cols[3]][1],
                artist_artist_rel_map[link_types[links[cols[1]][1]][6]] + "\n"
            )))
# --------
# Release-group nodes; the primary type (Album, Single, ...) becomes an
# extra Neo4j label via release_types.
with open("in/release_group_primary_type") as f:
    for line in f:
        cols = line.split("\t")
        release_types[cols[0]] = ";" + cols[1]
with open("repo/release.csv", "w") as out:
    out.write("id:ID(Release),name,:LABEL\n")
    with open("in/release_group") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((
                cols[1],
                '"' + cols[2].replace("\"", "\"\"") + '"',
                "Release" + release_types[cols[4]],
            )) + "\n")
            release_groups[cols[0]] = cols

# Individual releases are collapsed onto their release group.
with open("in/release") as f:
    for line in f:
        cols = line.split("\t")
        release_to_release_group_map[cols[0]] = cols[4]

with open("tmp/tmp_artist_release.csv", "w") as out:
    out.write(":START_ID(Artist),:END_ID(Release),:TYPE\n")
    with open("in/l_artist_release") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((
                artists[cols[2]][1],
                release_groups[release_to_release_group_map[cols[3]]][1],
                artist_release_rel_map[link_types[links[cols[1]][1]][6]]
            )) + "\n")

with open("repo/release_release.csv", "w") as out:
    out.write(":START_ID(Release),:END_ID(Release),:TYPE\n")
    with open("in/l_release_group_release_group") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((
                release_groups[cols[2]][1],
                release_groups[cols[3]][1],
                release_release_rel_map[link_types[links[cols[1]][1]][6]]
            )) + "\n")

# Collapsing releases onto release groups produces duplicate edges; sort and
# uniq the body while keeping the header as the first line.
os.system("(head -n 1 tmp/tmp_artist_release.csv && tail -n +2 tmp/tmp_artist_release.csv"
          " | sort) | uniq > repo/artist_release.csv && rm tmp/tmp_artist_release.csv")
# ---
# Tag nodes, then tag edges weighted by vote count (non-positive dropped).
with open("in/tag") as f:
    with open("repo/tag.csv", "w") as out:
        out.write("id:ID(Tag),name\n")
        for line in f:
            cols = line.split("\t")
            tags[cols[0]] = cols
            out.write(cols[0] + ",\"" + cols[1].replace("\"", "\"\"") + "\"\n")

with open("repo/release_tag.csv", "w") as out:
    out.write(":START_ID(Release),:END_ID(Tag),weight:int\n")
    with open("in/release_group_tag") as f:
        for line in f:
            cols = line.split("\t")
            if int(cols[2]) <= 0:
                continue
            out.write(",".join((
                release_groups[cols[0]][1],
                cols[1],
                cols[2],
            )) + "\n")

with open("repo/artist_tag.csv", "w") as out:
    out.write(":START_ID(Artist),:END_ID(Tag),weight:int\n")
    with open("in/artist_tag") as f:
        for line in f:
            cols = line.split("\t")
            if int(cols[2]) <= 0:
                continue
            out.write(",".join((
                artists[cols[0]][1],
                cols[1],
                cols[2],
            )) + "\n")

with open("repo/tag_tag.csv", "w") as out:
    out.write(":START_ID(Tag),:END_ID(Tag),weight\n")
    with open("in/tag_relation") as f:
        for line in f:
            cols = line.split("\t")
            if int(cols[2]) <= 0:
                continue
            out.write(",".join((
                cols[0],
                cols[1],
                cols[2],
            )) + "\n")

# -----
# Label nodes (type becomes an extra label) and label<->label relationships.
with open("repo/labels.csv", "w") as out:
    out.write("id:ID(Label),name,code,:LABEL\n")
    with open("in/label") as f:
        for line in f:
            cols = line.split("\t")
            labels[cols[0]] = cols
            out.write(",".join((
                cols[1],
                "\"" + cols[2].replace("\"", "\"\"") + "\"",
                cols[9] if cols[9] != "\\N" else "",  # label code, "" when NULL
                "Label" + label_types[cols[10]]
            )) + "\n")

with open("repo/label_label.csv", "w") as out:
    out.write(":START_ID(Label),:END_ID(Label),:TYPE\n")
    with open("in/l_label_label") as f:
        for line in f:
            cols = line.split("\t")
            out.write(",".join((
                labels[cols[2]][1],
                labels[cols[3]][1],
                label_label_rel_map[link_types[links[cols[1]][1]][6]]
            )) + "\n")
# ---

39
generate_scrape_tasks.py Normal file
View File

@ -0,0 +1,39 @@
import csv
import json
from multiprocessing.pool import ThreadPool

# Project-local client library for the task_tracker service (git submodule).
from task_tracker_drone.src.tt_drone.api import TaskTrackerApi, Worker

TT_API_URL = "https://tt.simon987.net/api"
TT_PROJECT = 1

api = TaskTrackerApi(TT_API_URL)

# Reuse the persisted worker identity when available; otherwise register a
# new worker and wait for a project admin to grant it access.
worker = Worker.from_file(api)
if not worker:
    worker = api.make_worker("last.fm scraper")
    worker.dump_to_file()
    worker.request_access(TT_PROJECT, True, True)
    input("Give permission to " + worker.alias)

with open("repo/artist.csv") as f:
    reader = csv.reader(f)

    def mktask(line):
        # One scrape task per artist; unique_str lets the tracker
        # de-duplicate tasks across repeated runs of this script.
        res = worker.submit_task(
            project=TT_PROJECT,
            recipe=json.dumps({"mbid": line[0], "name": line[1]}),
            unique_str=line[0],
            max_assign_time=60 * 5,
        )
        print(res.text)

    def lines():
        # Only artists whose :LABEL column contains "Group" are scraped
        # (see convert_mb.py, which appends ";Group" for bands).
        for line in reader:
            if "Group" in line[2]:
                yield line

    # NOTE(review): the pool is never close()d/join()ed; map() blocks until
    # completion so this works, but explicit cleanup would be tidier.
    pool = ThreadPool(processes=60)
    pool.map(func=mktask, iterable=lines())

19
get_musicbrains_dump.sh Executable file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Fetch the latest full MusicBrainz data dump and extract into ./in/ only
# the tables consumed by convert_mb.py.

latest=$(curl http://ftp.musicbrainz.org/pub/musicbrainz/data/fullexport/LATEST)

mkdir in 2> /dev/null
cd in

# -nc (no-clobber): skip the download when the archives already exist.
wget -nc "http://ftp.musicbrainz.org/pub/musicbrainz/data/fullexport/${latest}/mbdump.tar.bz2"
wget -nc "http://ftp.musicbrainz.org/pub/musicbrainz/data/fullexport/${latest}/mbdump-derived.tar.bz2"

# Core entity and relationship tables.
tar -xjvf mbdump.tar.bz2 mbdump/area mbdump/artist mbdump/l_area_area mbdump/l_artist_artist \
mbdump/l_artist_release mbdump/l_artist_release_group mbdump/l_label_label mbdump/l_release_group_release_group \
mbdump/label mbdump/label_type mbdump/link mbdump/link_type mbdump/release mbdump/release_group \
mbdump/release_group_primary_type

# Derived tables (tag/vote data).
tar -xjvf mbdump-derived.tar.bz2 mbdump/artist_tag mbdump/release_group_tag mbdump/tag mbdump/tag_relation

mv mbdump/* .
rm -r mbdump
cd ..

49
import.sh Executable file
View File

@ -0,0 +1,49 @@
#!/bin/bash
# Download the CSV files produced by convert_mb.py (served over HTTP at
# ${REPOSITORY}) and bulk-load them into a fresh Neo4j database.

export NEO4J_HOME="/home/drone/Downloads/neo4j-community-3.5.3"
export REPOSITORY="http://localhost:9999"
export DATABASE="graph.db"

# neo4j-admin import refuses to run if the target database already exists.
rm -rf "${NEO4J_HOME}/data/databases/${DATABASE}"

mkdir workspace 2> /dev/null
cd workspace

wget ${REPOSITORY}/area.csv
wget ${REPOSITORY}/area_area.csv
wget ${REPOSITORY}/lastfm_artist.csv
wget ${REPOSITORY}/artist_area.csv
wget ${REPOSITORY}/artist_artist.csv
wget ${REPOSITORY}/artist_release.csv
wget ${REPOSITORY}/release.csv
wget ${REPOSITORY}/tag.csv
wget ${REPOSITORY}/tag_tag.csv
wget ${REPOSITORY}/release_tag.csv
wget ${REPOSITORY}/release_release.csv
wget ${REPOSITORY}/artist_tag.csv
wget ${REPOSITORY}/labels.csv
wget ${REPOSITORY}/label_label.csv
wget ${REPOSITORY}/lastfm_artist_artist.csv

# Bulk import. Relationship types not present in a CSV's :TYPE column are
# given on the command line (e.g. :IS_PART_OF).
# NOTE(review): the leading "." sources neo4j-admin into this shell —
# confirm this is intentional (a plain invocation would normally suffice).
. ${NEO4J_HOME}/bin/neo4j-admin import \
--database ${DATABASE}\
--high-io=true\
--nodes:Area:MusicBrainzEntity "area.csv"\
--nodes:MusicBrainzEntity "release.csv"\
--nodes:MusicBrainzEntity "lastfm_artist.csv"\
--nodes:Tag "tag.csv"\
--nodes:MusicBrainzEntity "labels.csv"\
--relationships:IS_PART_OF "area_area.csv"\
--relationships:IS_BASED_IN "artist_area.csv"\
--relationships "artist_artist.csv"\
--relationships "artist_release.csv"\
--relationships:IS_TAGGED "release_tag.csv"\
--relationships:IS_TAGGED "artist_tag.csv"\
--relationships:IS_RELATED_TO "tag_tag.csv"\
--relationships "label_label.csv"\
--relationships "release_release.csv"\
--relationships:IS_RELATED_TO "lastfm_artist_artist.csv"

rm *.csv
cd ..

1
last.fm Submodule

@ -0,0 +1 @@
Subproject commit da0874207e2e214ee72be818233e2bf2d30ded19

View File

@ -0,0 +1,74 @@
import csv
import json
import sqlite3
import sys
def patch(lastfm_data):
    """Merge scraped last.fm metadata into the Neo4j import CSVs.

    Reads JSON blobs from the ``lastfmdata`` table of the sqlite database at
    ``lastfm_data``, then:
      * rewrites repo/artist.csv as repo/lastfm_artist.csv with two extra
        columns (listeners:int, playcount:int — 0 when the artist was not
        scraped);
      * writes repo/lastfm_artist_artist.csv with one weighted similarity
        edge per artist pair (deduplicated in both directions), keeping only
        pairs whose endpoints both exist in artist.csv.
    """
    with sqlite3.connect(lastfm_data) as conn:
        cur = conn.cursor()
        cur.execute("SELECT data FROM lastfmdata")
        data = cur.fetchall()

    if data:
        buffer = []
        dup_buf = set()
        artist_listeners = dict()
        artists = set()

        for row in data:
            lastfm_data = json.loads(row[0])
            # NOTE(review): assumes missing mbids are JSON null; empty-string
            # mbids would slip through this filter — confirm scraper output.
            for similar in [s for s in lastfm_data["similar"] if s["mbid"] is not None]:
                if (similar["mbid"], lastfm_data["artist"]) not in dup_buf:
                    buffer.append((
                        similar["mbid"],
                        lastfm_data["artist"],
                        similar["match"]
                    ))
                    # Mark both directions so each pair is emitted only once.
                    dup_buf.add((similar["mbid"], lastfm_data["artist"]))
                    dup_buf.add((lastfm_data["artist"], similar["mbid"]))
            artist_listeners[lastfm_data["artist"]] = (lastfm_data["listeners"], lastfm_data["playcount"])
        del dup_buf

        with open("repo/lastfm_artist.csv", "w") as out:
            writer = csv.writer(out)
            # BUG FIX: the original header listed only 5 columns (and placed
            # :LABEL after name), while the rows below copy all four columns
            # of repo/artist.csv (id,name,year,:LABEL) plus two more — the
            # column counts did not match, breaking the Neo4j bulk import.
            writer.writerow([
                "id:ID(Artist)", "name", "year:int", ":LABEL",
                "listeners:int", "playcount:int"
            ])
            with open("repo/artist.csv") as f:
                reader = csv.reader(f)
                reader.__next__()  # Skip header
                for row in reader:
                    writer.writerow([
                        row[0],
                        row[1],
                        row[2],
                        row[3],
                        artist_listeners.get(row[0], (0, 0))[0],
                        artist_listeners.get(row[0], (0, 0))[1],
                    ])
                    artists.add(row[0])

        with open("repo/lastfm_artist_artist.csv", "w") as out:
            out.write(",".join((
                ":START_ID(Artist)", ":END_ID(Artist)", "weight"
            )) + "\n")
            for x in buffer:
                # Drop edges whose endpoints are unknown to MusicBrainz.
                if x[0] not in artists:
                    continue
                if x[1] not in artists:
                    continue
                out.write(",".join(x) + "\n")
patch(sys.argv[1])

1
requirements.txt Normal file
View File

@ -0,0 +1 @@
requests

1
task_tracker_drone Submodule

@ -0,0 +1 @@
Subproject commit 3aa187c2c47d85741afc2beec59f5c6b7110e1e7

20
ui/music_graph/.babelrc Normal file
View File

@ -0,0 +1,20 @@
{
"presets": [
[
"env",
{
"modules": false
}
]
],
"plugins": [
"lodash",
[
"component",
{
"libraryName": "element-ui",
"styleLibraryName": "theme-default"
}
]
]
}

View File

@ -0,0 +1,9 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 4
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

View File

@ -0,0 +1,2 @@
build/*.js
config/*.js

View File

@ -0,0 +1,53 @@
// http://eslint.org/docs/user-guide/configuring
// ESLint config: airbnb-base with 4-space indent, plus eslint-plugin-html so
// the <script> blocks of .vue single-file components are linted too.
module.exports = {
    root: true,
    parser: 'babel-eslint',
    parserOptions: {
        sourceType: 'module'
    },
    env: {
        browser: true,
    },
    extends: 'airbnb-base',
    // required to lint *.vue files
    plugins: [
        'html'
    ],
    // check if imports actually resolve
    'settings': {
        'import/resolver': {
            'webpack': {
                'config': 'build/webpack.base.js'
            }
        }
    },
    // add your custom rules here
    'rules': {
        // Indent with 4 spaces
        "indent": ["error", 4],
        "no-param-reassign": 0,
        "no-underscore-dangle": 0,
        // don't require .vue extension when importing
        'import/extensions': ['error', 'always', {
            'js': 'never',
            'vue': 'never'
        }],
        // allow optionalDependencies
        'import/no-extraneous-dependencies': ['error', {
            'optionalDependencies': ['test/unit/index.js']
        }],
        // allow debugger during development
        'no-debugger': process.env.NODE_ENV === 'production' ? 2 : 0
    },
    // Globals injected by the test runner and the webpack build.
    "globals": {
        "describe": true,
        "it": true,
        "expect": true,
        "window": true,
        "document": true,
        "__DEV__": true,
        "__PROD__": true,
        "__APP_MODE__": ""
    },
};

View File

@ -0,0 +1,10 @@
// App-mode config ("app"): merged last by the config index, so these values
// override the env-specific config.
const API_URL = '//app.example.com/api';
const API_VERSION = '2.0';
const BASE_URL = `${API_URL}/${API_VERSION}`;

exports.getURL = url => BASE_URL + url;

// NOTE(review): assigning module.exports below replaces the exports object,
// so getURL is not part of this module's public API — confirm intended.
module.exports = {
    appName: 'Some App Specific Mode',
    otherAPI: exports.getURL('/something/')
};

View File

@ -0,0 +1,13 @@
// Default application config: merged before the env- and app-mode-specific
// configs, which may override any of these values.
const API_URL = '//app.example.com/api';
const API_VERSION = '1.0';
const BASE_URL = `${API_URL}/${API_VERSION}`;

exports.getURL = url => BASE_URL + url;

// NOTE(review): module.exports reassignment drops the getURL export above;
// getURL is only used internally here — confirm intended.
module.exports = {
    appName: 'Default App',
    debug: false,
    sessionName: 'session_id',
    credential: 'same-origin',
    exampleAPI: exports.getURL('/thing/')
};

View File

@ -0,0 +1,26 @@
const path = require('path');

// Development config: local API endpoint plus webpack build/output settings.
const API_URL = '//localhost:7000/api';
const API_VERSION = '1.0';
const BASE_URL = `${API_URL}/${API_VERSION}`;

exports.getURL = url => BASE_URL + url;

module.exports = {
    appName: 'Dev App',
    debug: true,
    index: path.resolve(__dirname, '../dist/index.html'),
    assetsRoot: path.resolve(__dirname, '../dist'),
    assetsSubDirectory: 'assets',
    assetsPublicPath: '/',
    // Gzip off by default as many popular static hosts such as
    // Surge or Netlify already gzip all static assets for you.
    // Before setting to `true`, make sure to:
    // npm install --save-dev compression-webpack-plugin
    productionGzip: false,
    productionGzipExtensions: ['js', 'css'],
    // Run the build command with an extra argument to
    // View the bundle analyzer report after build finishes:
    // `npm run build --report`
    // Set to `true` or `false` to always turn it on or off
    bundleAnalyzerReport: process.env.npm_config_report,
};

View File

@ -0,0 +1,25 @@
const path = require('path');

// Production config: production API endpoint plus webpack output settings.
const API_URL = '//app.production.com/api';
const API_VERSION = '2.0';
const BASE_URL = `${API_URL}/${API_VERSION}`;

exports.getURL = url => BASE_URL + url;

module.exports = {
    appName: 'Prod App',
    index: path.resolve(__dirname, '../dist/index.html'),
    assetsRoot: path.resolve(__dirname, '../dist'),
    assetsSubDirectory: 'assets',
    assetsPublicPath: '/',
    // Gzip off by default as many popular static hosts such as
    // Surge or Netlify already gzip all static assets for you.
    // Before setting to `true`, make sure to:
    // npm install --save-dev compression-webpack-plugin
    productionGzip: false,
    productionGzipExtensions: ['js', 'css'],
    // Run the build command with an extra argument to
    // View the bundle analyzer report after build finishes:
    // `npm run build --report`
    // Set to `true` or `false` to always turn it on or off
    bundleAnalyzerReport: process.env.npm_config_report,
};

View File

@ -0,0 +1,24 @@
const merge = require('webpack-merge');

const DEV = 'development';
const PROD = 'production';

const env = process.env.NODE_ENV || DEV;
const isDev = env === DEV;
const isProd = env === PROD;
// NODE_APP_MODE selects the app-specific overlay (config.app by default).
const appMode = process.env.NODE_APP_MODE || 'app';

const defaults = {
    env,
    isProd,
    isDev,
    appMode,
};

const baseConfig = require('./config.base');
const envConfig = require(`./config.${env}`);
const appConfig = require(`./config.${appMode}`);

// Later sources win: app config overrides env config overrides base/defaults.
const config = merge(defaults, baseConfig, envConfig, appConfig);

module.exports = config;

12
ui/music_graph/index.html Normal file
View File

@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>music_graph</title>
</head>
<body>
<div id="app"></div>
<!-- built files will be auto injected -->
</body>
</html>

10946
ui/music_graph/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,65 @@
{
"name": "music_graph",
"description": "wip",
"version": "1.0.0",
"author": "simon987 <>",
"private": true,
"scripts": {
"dev": "cross-env NODE_ENV=development NODE_APP_MODE=app webpack-dev-server --open --hot",
"build": "cross-env NODE_ENV=production NODE_APP_MODE=app webpack --progress --hide-modules",
"lint": "eslint --ext .js,.vue src test/unit/specs"
},
"dependencies": {
"d3": "^5.9.2",
"d3-force": "^2.0.1",
"vue": "^2.4.2",
"vue-router": "^2.7.0"
},
"devDependencies": {
"axios": "^0.16.2",
"babel-core": "^6.25.0",
"babel-eslint": "^7.2.3",
"babel-loader": "^7.1.1",
"babel-preset-env": "^1.6.0",
"compression-webpack-plugin": "^1.0.0",
"copy-webpack-plugin": "^4.0.1",
"cross-env": "^5.0.5",
"css-loader": "^0.28.4",
"eslint": "^4.4.1",
"eslint-loader": "^1.9.0",
"eslint-friendly-formatter": "^3.0.0",
"eslint-plugin-html": "^3.2.0",
"eslint-config-airbnb-base": "^11.3.1",
"eslint-import-resolver-webpack": "^0.8.3",
"eslint-plugin-import": "^2.7.0",
"extract-text-webpack-plugin": "^3.0.0",
"file-loader": "^0.11.2",
"element-ui": "^1.4.2",
"babel-plugin-component": "^0.10.0",
"friendly-errors-webpack-plugin": "^1.6.1",
"html-webpack-plugin": "^2.30.1",
"lodash": "^4.17.4",
"babel-plugin-lodash": "^3.2.11",
"lodash-webpack-plugin": "^0.11.4",
"node-sass": "^4.5.3",
"optimize-css-assets-webpack-plugin": "^3.0.0",
"sass-loader": "^6.0.6",
"style-loader": "^0.18.2",
"url-loader": "^0.5.9",
"vue-loader": "^13.0.4",
"vue-template-compiler": "^2.4.2",
"webpack": "^3.5.4",
"webpack-bundle-analyzer": "^2.9.0",
"webpack-dev-server": "^2.7.1",
"webpack-merge": "^4.1.0"
},
"engines": {
"node": ">= 4.0.0",
"npm": ">= 3.0.0"
},
"browserslist": [
"> 1%",
"last 2 versions",
"not ie <= 8"
]
}

View File

View File

@ -0,0 +1,16 @@
<template>
<div id="app">
<router-view></router-view>
</div>
</template>
<script>
export default {
name: 'app',
};
</script>
<style>
#app {
}
</style>

View File

@ -0,0 +1,176 @@
<template>
<div class='hello'>
</div>
</template>
<script>
import * as d3 from 'd3';
// Map a Neo4j label array to the node category used for sizing/colouring.
// Precedence: Tag > Group > Artist; unknown label sets yield undefined.
function getNodeType(labels) {
    const precedence = ['Tag', 'Group', 'Artist'];
    return precedence.find(type => labels.includes(type));
}
// Graph bootstrap: load pre-exported Neo4j result rows and render a d3
// force-directed graph of artists/groups and their tags.
// NOTE(review): the row shape (row._fields[0..2]) assumes the exported query
// returned (node)-[rel]->(tag) triples — confirm against the export script.
let data = {};

d3.json('../static/data.json')
    .then((r) => {
        data = r;

        // One graph edge per relationship; `.low` unwraps the neo4j driver's
        // 64-bit integer encoding ({low, high}).
        const links = data.map(row => ({
            source: row._fields[1].start.low,
            target: row._fields[1].end.low,
            weight: row._fields[1].properties.weight.low,
        }));

        const nodes = [];
        const seenIds = new Set();
        // De-duplicate nodes: endpoints repeat for every relationship row.
        // (Set membership replaces the original O(n^2) Array.find scan.)
        function addNode(node) {
            if (seenIds.has(node.id)) {
                return;
            }
            seenIds.add(node.id);
            nodes.push(node);
        }

        data.forEach((row) => {
            addNode({
                id: row._fields[0].identity.low,
                name: row._fields[0].properties.name,
                listeners: row._fields[0].properties.listeners.low,
                type: getNodeType(row._fields[0].labels),
            });
            addNode({
                id: row._fields[2].identity.low,
                name: row._fields[2].properties.name,
                type: 'Tag',
            });
        });

        // Artist radius grows with the square root of listener count,
        // clamped to a 15px minimum; tags are fixed-size.
        function getRadius(node) {
            if (node.type === 'Tag') {
                return 10;
            }
            return Math.max(Math.sqrt(node.listeners / 5000), 15);
        }

        function getColor(node) {
            switch (node.type) {
            case 'Tag':
                return '#e0e0e0';
            case 'Artist':
                return '#42c3f7';
            case 'Group':
                return '#00a5e9';
            default:
                return '#DEADFB';
            }
        }

        const width = window.innerWidth - 5;
        const height = window.innerHeight - 5;

        const simulation = d3.forceSimulation(nodes)
            .force('link', d3.forceLink(links).id(d => d.id))
            .force('charge', d3.forceManyBody())
            .force('center', d3.forceCenter(width / 2, height / 2))
        ;

        let container;
        function zoomed() {
            container.attr('transform', d3.event.transform);
        }
        function nodeZoomed() {
            // TODO
        }

        // Standard d3 drag handlers: pin the node while dragging and re-heat
        // the simulation so its neighbours react.
        function dragStarted(d) {
            if (!d3.event.active) {
                simulation.alphaTarget(0.3).restart();
            }
            d.fx = d.x;
            d.fy = d.y;
        }
        function dragged(d) {
            d.fx = d3.event.x;
            d.fy = d3.event.y;
        }
        function dragEnded(d) {
            if (!d3.event.active) {
                simulation.alphaTarget(0);
            }
            d.fx = null;
            d.fy = null;
        }

        const svg = d3.select('body')
            .append('svg')
            .attr('width', width)
            .attr('height', height);

        // Invisible full-size backdrop that catches pan/zoom gestures.
        svg.append('rect')
            .attr('width', width)
            .attr('height', height)
            .style('pointer-events', 'all')
            .style('fill', 'none')
            .call(d3.zoom()
                .scaleExtent([1 / 3, 5])
                .on('zoom', zoomed));
        document.body.setAttribute('style', 'background: #E7EDEB');

        container = svg.append('g');

        const link = container.append('g')
            .attr('stroke', '#003a6b')
            .selectAll('line')
            .data(links)
            .join('line')
            .attr('stroke-opacity', rel => rel.weight / 15)
            .attr('stroke-width', rel => Math.sqrt(rel.weight) * 0.6);

        const node = container.append('g')
            .attr('stroke', '#ffffff')
            .attr('stroke-width', 1.5)
            .selectAll('circle')
            .data(nodes)
            .join('circle')
            .attr('r', d => getRadius(d))
            .attr('fill', d => getColor(d))
            .call(d3.drag()
                .on('start', dragStarted)
                .on('drag', dragged)
                .on('end', dragEnded))
            .on('wheel', nodeZoomed);

        // Browser-native tooltip showing the node name and id.
        node.append('title')
            .text(d => `${d.name} ${d.id}`);

        simulation.on('tick', () => {
            link
                .attr('x1', d => d.source.x)
                .attr('y1', d => d.source.y)
                .attr('x2', d => d.target.x)
                .attr('y2', d => d.target.y);
            node
                .attr('cx', d => d.x)
                .attr('cy', d => d.y);
        });
    })
    // BUG FIX: the original chain had no rejection handler, so a failed or
    // malformed data.json load was silently dropped as an unhandled promise
    // rejection and the page rendered nothing with no diagnostics.
    .catch((err) => {
        console.error('Failed to load graph data', err);
    });
// Vue component definition for the Hello view; the d3 visualisation runs as
// a module side effect, independent of this component instance.
const WELCOME_MESSAGE = 'Welcome to Your Vue.js App';

export default {
    name: 'hello',
    data() {
        return {
            msg: WELCOME_MESSAGE,
        };
    },
};
</script>
<style scoped>
</style>

View File

@ -0,0 +1,13 @@
import Vue from 'vue';
import 'element-ui/lib/theme-default/index.css';
import App from './App';
import router from './router';

// Suppress the "running in development mode" console banner.
Vue.config.productionTip = false;

/* eslint-disable no-new */
// Root instance mounts onto <div id="app"> (see index.html).
new Vue({
    el: '#app',
    router,
    render: h => h(App),
});

View File

@ -0,0 +1,15 @@
import Vue from 'vue';
import Router from 'vue-router';
import Hello from '../components/Hello';

Vue.use(Router);

// Single-route app: everything is rendered by the Hello graph component.
export default new Router({
    routes: [
        {
            path: '/',
            name: 'Hello',
            component: Hello,
        },
    ],
});

View File

View File

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
/* eslint-disable */
const webpackMerge = require('webpack-merge');

// Webpack entry point: merges the shared base config with the NODE_ENV
// specific overlay (build/webpack.development or build/webpack.production).
module.exports = () => {
    const env = process.env.NODE_ENV;
    const baseConfig = require('./build/webpack.base');
    const envConfig = require(`./build/webpack.${env}`);
    return webpackMerge(baseConfig, envConfig);
};