This commit is contained in:
englehardt 2014-03-31 20:41:43 -04:00
Parent 366c4fe791 66d0e0c21f
Commit aac9a4c519
15 changed files with 400 additions and 76 deletions

View File

@@ -11,9 +11,9 @@ def all_same(items):
def all_same_len(items):
return all(len(x) == len(items[0]) for x in items)
# Are two cookies more than 80% similar according to the Ratcliff-Obershelp metric
# Are two cookies more than 33% similar according to the Ratcliff-Obershelp metric
def ro_similar(seq1, seq2):
return difflib.SequenceMatcher(a=seq1, b=seq2).ratio() > 0.5
return difflib.SequenceMatcher(a=seq1, b=seq2).ratio() > 0.33
# Are all cookies in a list pairwise-dissimilar (i.e., every pair fails the ro_similar test)
def all_dissimilar(items):
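
For intuition, a minimal sketch of the ro_similar test (standard-library difflib; the cookie values below are invented):

import difflib

a = "f83bc2797b134c5a9d0eb2746f130bd1"
b = "f83bc2797b134c5a9d0eb2746f130bd2"   # same value, last character changed
c = "zqwzxvzqwzxvzqwz"                   # shares no characters with a
print(difflib.SequenceMatcher(a=a, b=b).ratio() > 0.33)   # True: similar
print(difflib.SequenceMatcher(a=a, b=c).ratio() > 0.33)   # False: pairwise-dissimilar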

View File

@@ -8,7 +8,7 @@ from dateutil import parser
# builds a dictionary with keys = (domain, name) and values being lists of cookie values
# values must come from non-short-lived cookies and be consistent across the crawls
# extracts from a single OpenWPM database
def extract_cookies_from_db(db_name):
def extract_cookie_candidates_from_db(db_name):
con = lite.connect(db_name)
cur = con.cursor()
@@ -52,7 +52,7 @@ def add_inner_parameters(raw_cookie_dict, domain, name, value):
# an ID must appear in at least 2 different crawls (otherwise, can't make a definitive statement about it)
# prunes away cookies with lengths less than or equal to 5 (these strings are probably too short)
# returns dictionary with domains as keys and cookie names as values
def extract_persistent_ids(cookie_dicts):
def extract_common_persistent_ids(cookie_dicts):
raw_id_dict = defaultdict(list) # for each cookie, a list of the values across each crawl
# combine all smaller cookie dictionaries into a larger dictionary
@@ -73,7 +73,34 @@ def extract_persistent_ids(cookie_dicts):
return domain_dict
# given a dictionary of persistent ids, goes through a database
# and returns a dictionary with the persistent ids and their unique values
# in the database (for those that actually appear)
def extract_known_cookies_from_db(db_name, cookie_dict):
con = lite.connect(db_name)
cur = con.cursor()
found_cookies = {}
for domain, name, value in cur.execute('SELECT domain, name, value FROM cookies'):
domain = domain if len(domain) == 0 or domain[0] != "." else domain[1:]
# first search for most basic cookies
if domain in cookie_dict and name in cookie_dict[domain]:
found_cookies[(domain, name)] = value
# next, look for potential nested cookies
if "=" in value:
for delimiter in ["&", ":"]:
parts = value.split(delimiter)
for part in parts:
params = part.split("=")
                    if len(params) == 2 and domain in cookie_dict and name + "#" + params[0] in cookie_dict[domain]:
found_cookies[(domain, name + "#" + params[0])] = params[1]
return found_cookies
if __name__ == "__main__":
c1 = extract_cookies_from_db("/home/christian/Desktop/crawl1.sqlite")
c2 = extract_cookies_from_db("/home/christian/Desktop/crawl2.sqlite")
print len(extract_persistent_ids([c1, c2]))
c1 = extract_cookie_candidates_from_db("/home/christian/Desktop/crawl1.sqlite")
c2 = extract_cookie_candidates_from_db("/home/christian/Desktop/crawl2.sqlite")
extracted = extract_common_persistent_ids([c1, c2])
known = extract_known_cookies_from_db("/home/christian/Desktop/crawl1.sqlite", extracted)
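
The nested-cookie branch above decomposes compound values such as "uid=ABC123&geo=US" and keys each inner parameter as "<cookie name>#<param name>". A minimal sketch with an invented value, mirroring the delimiter logic in extract_known_cookies_from_db:

value = "uid=ABC123&geo=US"   # hypothetical compound cookie value
for delimiter in ["&", ":"]:
    for part in value.split(delimiter):
        params = part.split("=")
        if len(params) == 2:
            print(params[0] + " -> " + params[1])
# prints "uid -> ABC123" and "geo -> US"; for a cookie named "id" these would
# be stored under the keys "id#uid" and "id#geo"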

View File

@@ -1,69 +0,0 @@
import sqlite3 as lite
import urlparse
import census_util
from collections import defaultdict
def add_params(raw_params, domain, domain_dict):
# add the entry assuming it does not exist
if domain not in raw_params:
raw_params[domain] = defaultdict(list)
for param in domain_dict:
for value in domain_dict[param]:
raw_params[domain][param].append(value)
return raw_params
def extract_parameters_from_db(db_name):
raw_param_dict = {}
con = lite.connect(db_name)
cur = con.cursor()
#raw_cookie_dict = defaultdict(list) # cookie dict containing list of values
for url, in cur.execute('SELECT url FROM http_requests'):
domain = census_util.extract_domain(url)
query = urlparse.urlparse(url).query
if query is None:
continue
param_dict = urlparse.parse_qs(query)
if len(param_dict) == 0:
continue
raw_param_dict = add_params(raw_param_dict, domain, param_dict)
    # throw away parameters that do not stay the same the entire time
param_dict = {}
for domain in raw_param_dict:
param_dict[domain] = {}
for param in raw_param_dict[domain]:
if census_util.all_same(raw_param_dict[domain][param]):
param_dict[domain][param] = raw_param_dict[domain][param][0]
return param_dict
def extract_persistent_parameters(param_dicts):
raw_param_dict = {}
for dict in param_dicts:
for domain in dict:
if domain not in raw_param_dict:
raw_param_dict[domain] = defaultdict(list)
for param in dict[domain]:
raw_param_dict[domain][param].append(dict[domain][param])
    # extract same-length parameter values that are also sufficiently dissimilar and long enough
param_dict = {}
for domain in raw_param_dict:
param_dict[domain] = {}
for param in raw_param_dict[domain]:
if len(raw_param_dict[domain][param]) > 1 and len(raw_param_dict[domain][param][0]) > 5 \
and census_util.all_same_len(raw_param_dict[domain][param]) \
and census_util.all_dissimilar(raw_param_dict[domain][param]):
print domain + "\t" + param + "\t" + str(raw_param_dict[domain][param])
if __name__ == "__main__":
d1 = extract_parameters_from_db("/home/christian/Desktop/crawl1.sqlite")
d2 = extract_parameters_from_db("/home/christian/Desktop/crawl2.sqlite")
extract_persistent_parameters([d1, d2])
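
The deleted filter above kept a URL parameter only when its values across crawls were equally long, each longer than 5 characters, and pairwise dissimilar: the signature of a user-specific ID rather than shared content. A minimal sketch of those checks with invented values:

import difflib

values = ["abcdefgh", "12345678"]   # hypothetical per-user IDs from two crawls
same_len = all(len(v) == len(values[0]) for v in values)
long_enough = len(values[0]) > 5
dissimilar = difflib.SequenceMatcher(a=values[0], b=values[1]).ratio() <= 0.33
print(same_len and long_enough and dissimilar)   # True: looks like an ID parameter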

View File

@@ -0,0 +1,72 @@
import networkx as nx
#import matplotlib.pyplot as plt
import extract_cookie_ids
import sqlite3 as lite
import census_util
import sigma_graph_json
# builds and returns a cookie synchronization graph
# nodes are websites and (directed) edges illustrate cookie values flowing between sites
# each node and edge contains the cookies that are available to them
# only includes cookies in the graph that are present in at least 1 transfer
def build_sync_graph(db_name, known_cookies):
# first, build a dictionary that maps cookies to the value actually seen in the database
value_dict = extract_cookie_ids.extract_known_cookies_from_db(db_name, known_cookies)
g = nx.DiGraph() # cookie flow graph
con = lite.connect(db_name)
cur = con.cursor()
    # iterates through all the cookie ids to look for them flowing through the graph
for cookie in value_dict:
        # parameterized LIKE avoids quoting problems with raw cookie values
        query = 'SELECT url, referrer FROM http_requests WHERE url LIKE ?'
        for url, referrer in cur.execute(query, ('%' + value_dict[cookie] + '%',)):
url = census_util.extract_domain(url)
referrer = census_util.extract_domain(referrer)
# adds edges and adds cookies to nodes + edges
# TODO: error with blank strings?
cookie_str = str(cookie[0]) + " " + str(cookie[1])
if url not in g:
g.add_node(url, cookies={})
if referrer not in g:
g.add_node(referrer, cookies={})
if (referrer, url) not in g.edges():
g.add_edge(referrer, url, cookies={})
g.edge[referrer][url]['cookies'][cookie_str] = 1
g.node[referrer]['cookies'][cookie_str] = 1
g.node[url]['cookies'][cookie_str] = 1
    # adds the weights (number of distinct cookies seen) to the nodes and edges
    for node in g.nodes(data=True):
        g.node[node[0]]['weight'] = len(node[1]['cookies'])
    for edge in g.edges(data=True):
        g.edge[edge[0]][edge[1]]['weight'] = len(edge[2]['cookies'])
return g
# takes in a graph and adds fields to it to make it drawable in sigma.js
def add_drawable_graph_fields(G):
# remove blank node if necessary
if '' in G.nodes():
G.remove_node('')
layout = nx.spring_layout(G)
# adds coordinates to node
for node in layout:
G.add_node(node, x=float(layout[node][0]), y=float(layout[node][1]))
return G
if __name__ == "__main__":
c1 = extract_cookie_ids.extract_cookie_candidates_from_db("/home/christian/Desktop/crawl1.sqlite")
c2 = extract_cookie_ids.extract_cookie_candidates_from_db("/home/christian/Desktop/crawl2.sqlite")
extracted = extract_cookie_ids.extract_common_persistent_ids([c1, c2])
known = extract_cookie_ids.extract_known_cookies_from_db("/home/christian/Desktop/crawl1.sqlite", extracted)
G = build_sync_graph("/home/christian/Desktop/crawl1.sqlite", extracted)
G = add_drawable_graph_fields(G)
sigma_graph_json.write_json_graph_to_file(G, "/home/christian/Desktop/graph.json")
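
A toy version of the sync graph built above, using the networkx 1.x g.node / g.edge API that this code targets (site names and cookie invented):

import networkx as nx

g = nx.DiGraph()
g.add_node("news.example", cookies={})
g.add_node("tracker.example", cookies={})
g.add_edge("news.example", "tracker.example", cookies={})
cookie_str = "tracker.example id"   # "<domain> <name>", as in build_sync_graph
g.edge["news.example"]["tracker.example"]['cookies'][cookie_str] = 1
g.node["news.example"]['cookies'][cookie_str] = 1
g.node["tracker.example"]['cookies'][cookie_str] = 1
print(g.node["tracker.example"]['cookies'])   # {'tracker.example id': 1}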

View File

@@ -0,0 +1,65 @@
# Sigma.JS requires a very specific JSON format for its graphs
# The various python-based graph serialization libraries seem to fail for networkx graphs
# This module takes in such a graph and provides an accepted JSON serialization
# TODO: add support for directed graphs
import json
def build_node(G, node):
val = {
"id": str(node),
"label": str(node),
"x": G.node[node]['x'],
"y": G.node[node]['y'],
"size": 3,
"color": "ff0000",
"cookies": G.node[node]['cookies']
}
    return json.dumps(val)
# takes in an edge and a counter (i.e. the id for the edge)
# returns a string corresponding to the edge
def build_edge(G, edge, counter):
val = {
"id": "e" + str(counter),
"source": str(edge[0]),
"target": str(edge[1]),
"cookies": G.edge[edge[0]][edge[1]]['cookies']
}
    return json.dumps(val)
# Takes in a networkx graph
# Returns a json-encoded string
# Parsable by Sigma.JS
def build_json_encoding(G):
json_parts = []
json_parts.append("{\"nodes\": [")
# Adds the encoded nodes
counter = 0
num_nodes = len(G.nodes())
for node in G.nodes():
counter += 1
json_parts.append(build_node(G, node))
if counter < num_nodes:
json_parts.append(",")
# Adds the encoded edges
counter = 0
num_edges = len(G.edges())
json_parts.append("], \"edges\": [")
for edge in G.edges():
json_parts.append(build_edge(G, edge, counter))
counter += 1
if counter < num_edges:
json_parts.append(",")
json_parts.append("] }")
return "".join(json_parts)
# takes in a (for-now-undirected) graph and dumps it to <file_name>
def write_json_graph_to_file(G, file_name):
    with open(file_name, 'w') as f:
        f.write(build_json_encoding(G))
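
A usage sketch for the serializer (paths and site names hypothetical; assumes the same networkx 1.x API as the rest of this commit):

import networkx as nx
import sigma_graph_json

G = nx.Graph()
G.add_node("a.example", x=0.25, y=0.75, cookies={"tracker.example id": 1})
G.add_node("b.example", x=0.5, y=0.5, cookies={"tracker.example id": 1})
G.add_edge("a.example", "b.example", cookies={"tracker.example id": 1})
sigma_graph_json.write_json_graph_to_file(G, "/tmp/graph.json")
# /tmp/graph.json now holds {"nodes": [...], "edges": [...]} in the flat
# format that sigma.parsers.json loads in index.html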

22
census/www/index.html Normal file
View File

@@ -0,0 +1,22 @@
<html>
<head>
<script src="sigma.min.js"></script>
<script src="plugins/sigma.parsers.json.min.js"></script>
<script src="util/graph_actions.js"></script>
<link rel="stylesheet" href="jquery-ui/css/smoothness/jquery-ui-1.10.4.custom.css" />
<script src="util/setup_census.js"></script>
<script src="util/ui_commands.js"></script>
<script src="jquery-ui/js/jquery-1.10.2.js"></script>
<script src="jquery-ui/js/jquery-ui-1.10.4.custom.min.js"></script>
</head>
<body>
<div id="graph" style='height:100%; width:100%'></div>
<div id="cookie_panel">
<div id="owners"></div>
<div id="cookies"></div>
</div>
<div id="weight_slider"></div>
<script> init(); </script>
</body>
</html>

1
census/www/plugins/sigma.layout.forceAtlas2.min.js vendored Executable file

File diff suppressed because one or more lines are too long

1
census/www/plugins/sigma.parsers.gexf.min.js vendored Executable file

File diff suppressed because one or more lines are too long

1
census/www/plugins/sigma.parsers.json.min.js vendored Executable file
View File

@@ -0,0 +1 @@
(function(){"use strict";if("undefined"==typeof sigma)throw"sigma is not declared";sigma.utils.pkg("sigma.parsers"),sigma.utils.pkg("sigma.utils"),sigma.utils.xhr=function(){if(window.XMLHttpRequest)return new XMLHttpRequest;var a,b;if(window.ActiveXObject){a=["Msxml2.XMLHTTP.6.0","Msxml2.XMLHTTP.3.0","Msxml2.XMLHTTP","Microsoft.XMLHTTP"];for(b in a)try{return new ActiveXObject(a[b])}catch(c){}}return null},sigma.parsers.json=function(a,b,c){var d,e=sigma.utils.xhr();if(!e)throw"XMLHttpRequest not supported, cannot load the file.";e.open("GET",a,!0),e.onreadystatechange=function(){4===e.readyState&&(d=JSON.parse(e.responseText),b instanceof sigma?(b.graph.clear(),b.graph.read(d)):"object"==typeof b?(b.graph=d,b=new sigma(b)):"function"==typeof b&&(c=b,b=null),c&&c(b||d))},e.send()}}).call(this);

1
census/www/plugins/sigma.plugins.animate.min.js vendored Executable file
View File

@@ -0,0 +1 @@
(function(){"use strict";function a(a){if(d[a])return d[a];var b=[0,0,0];return a.match(/^#/)?(a=(a||"").replace(/^#/,""),b=3===a.length?[parseInt(a.charAt(0)+a.charAt(0),16),parseInt(a.charAt(1)+a.charAt(1),16),parseInt(a.charAt(2)+a.charAt(2),16)]:[parseInt(a.charAt(0)+a.charAt(1),16),parseInt(a.charAt(2)+a.charAt(3),16),parseInt(a.charAt(4)+a.charAt(5),16)]):a.match(/^ *rgba? *\(/)&&(a=a.match(/^ *rgba? *\( *([0-9]*) *, *([0-9]*) *, *([0-9]*) *(,.*)?\) *$/),b=[+a[1],+a[2],+a[3]]),d[a]={r:b[0],g:b[1],b:b[2]},d[a]}function b(b,c,d){b=a(b),c=a(c);var e={r:b.r*(1-d)+c.r*d,g:b.g*(1-d)+c.g*d,b:b.b*(1-d)+c.b*d};return"rgb("+[0|e.r,0|e.g,0|e.b].join(",")+")"}if("undefined"==typeof sigma)throw"sigma is not declared";sigma.utils.pkg("sigma.plugins");var c=0,d={};sigma.plugins.animate=function(a,d,e){function f(){var c=(sigma.utils.dateNow()-k)/i;c>=1?(a.graph.nodes().forEach(function(a){for(var b in d)b in d&&(a[b]=a[d[b]])}),a.refresh(),"function"==typeof g.onComplete&&g.onComplete()):(c=j(c),a.graph.nodes().forEach(function(a){for(var e in d)e in d&&(a[e]=e.match(/color$/)?b(l[a.id][e],a[d[e]],c):a[d[e]]*c+l[a.id][e]*(1-c))}),a.refresh(),a.animations[h]=requestAnimationFrame(f))}var g=e||{},h=++c,i=g.duration||a.settings("animationsTime"),j="string"==typeof g.easing?sigma.utils.easings[g.easing]:"function"==typeof g.easing?g.easing:sigma.utils.easings.quadraticInOut,k=sigma.utils.dateNow(),l=a.graph.nodes().reduce(function(a,b){var c;a[b.id]={};for(c in d)c in b&&(a[b.id][c]=b[c]);return a},{});a.animations=a.animations||Object.create({}),sigma.plugins.kill(a),f()},sigma.plugins.kill=function(a){for(var b in a.animations||{})cancelAnimationFrame(a.animations[b])}}).call(window);

1
census/www/plugins/sigma.plugins.neighborhoods.min.js vendored Executable file
View File

@@ -0,0 +1 @@
(function(){"use strict";if("undefined"==typeof sigma)throw"sigma is not declared";sigma.classes.graph.addMethod("neighborhood",function(a){var b,c,d,e,f,g={},h={},i={nodes:[],edges:[]};if(!this.nodes(a))return i;e=this.nodes(a),f={},f.center=!0;for(b in e)f[b]=e[b];g[a]=!0,i.nodes.push(f);for(b in this.allNeighborsIndex[a]){g[b]||(g[b]=!0,i.nodes.push(this.nodesIndex[b]));for(c in this.allNeighborsIndex[a][b])h[c]||(h[c]=!0,i.edges.push(this.edgesIndex[c]))}for(b in g)if(b!==a)for(c in g)if(c!==a&&b!==c&&this.allNeighborsIndex[b][c])for(d in this.allNeighborsIndex[b][c])h[d]||(h[d]=!0,i.edges.push(this.edgesIndex[d]));return i}),sigma.utils.pkg("sigma.plugins"),sigma.plugins.neighborhoods=function(){var a=new sigma.classes.graph;this.neighborhood=function(b){return a.neighborhood(b)},this.load=function(b,c){var d=function(){if(window.XMLHttpRequest)return new XMLHttpRequest;var a,b;if(window.ActiveXObject){a=["Msxml2.XMLHTTP.6.0","Msxml2.XMLHTTP.3.0","Msxml2.XMLHTTP","Microsoft.XMLHTTP"];for(b in a)try{return new ActiveXObject(a[b])}catch(c){}}return null}();if(!d)throw"XMLHttpRequest not supported, cannot load the data.";return d.open("GET",b,!0),d.onreadystatechange=function(){4===d.readyState&&(a.clear().read(JSON.parse(d.responseText)),c&&c())},d.send(),this},this.read=function(b){a.clear().read(b)}}}).call(window);

1
census/www/plugins/sigma.renderers.customShapes.min.js vendored Executable file
View File

@@ -0,0 +1 @@
(function(){"use strict";var a=[],b=function(b,c,d){a.push({name:b,drawShape:c,drawBorder:d})},c=function(){return a},d=function(a){return function(b,c,d,e,f,g){g.fillStyle=f,g.beginPath(),a(b,c,d,e,g),g.closePath(),g.fill()}},e=function(a){return function(b,c,d,e,f,g){g.strokeStyle=f,g.lineWidth=e/5,g.beginPath(),a(b,c,d,e,g),g.closePath(),g.stroke()}},f=function(a,b,c,d,e){var f=45*Math.PI/180;e.moveTo(b+d*Math.sin(f),c-d*Math.cos(f));for(var g=1;4>g;g++)e.lineTo(b+Math.sin(f+2*Math.PI*g/4)*d,c-Math.cos(f+2*Math.PI*g/4)*d)};b("square",d(f),e(f));var g=function(a,b,c,d,e){e.arc(b,c,d,0,2*Math.PI,!0)};b("circle",d(g),e(g));var h=function(a,b,c,d,e){e.moveTo(b-d,c),e.lineTo(b,c-d),e.lineTo(b+d,c),e.lineTo(b,c+d)};b("diamond",d(h),e(h));var i=function(a,b,c,d,e){var f=a.equilateral&&a.equilateral.numPoints||5,g=(a.equilateral&&a.equilateral.rotate||0)*Math.PI/180,h=d;e.moveTo(b+h*Math.sin(g),c-h*Math.cos(g));for(var i=1;f>i;i++)e.lineTo(b+Math.sin(g+2*Math.PI*i/f)*h,c-Math.cos(g+2*Math.PI*i/f)*h)};b("equilateral",d(i),e(i));var j=function(a,b,c,d,e){var f=a.star&&a.star.numPoints||5,g=a.star&&a.star.innerRatio||.5,h=d,i=d*g,j=Math.PI/f;e.moveTo(b,c-d);for(var k=0;f>k;k++)e.lineTo(b+Math.sin(j+2*Math.PI*k/f)*i,c-Math.cos(j+2*Math.PI*k/f)*i),e.lineTo(b+Math.sin(2*Math.PI*(k+1)/f)*h,c-Math.cos(2*Math.PI*(k+1)/f)*h)};b("star",d(j),e(j));var k=function(a,b,c,d,e,f){f.fillStyle="yellow",f.beginPath(),f.arc(b,c,d,1.25*Math.PI,0,!1),f.arc(b,c,d,0,.75*Math.PI,!1),f.lineTo(b,c),f.closePath(),f.fill(),f.fillStyle="white",f.strokeStyle="black",f.beginPath(),f.arc(b+d/3,c-d/3,d/4,0,2*Math.PI,!1),f.closePath(),f.fill(),f.stroke(),f.fillStyle="black",f.beginPath(),f.arc(b+4*d/9,c-d/3,d/8,0,2*Math.PI,!1),f.closePath(),f.fill()};b("pacman",k,null),this.ShapeLibrary={enumerate:c,version:"0.1"}}).call(this),function(a){"use strict";if("undefined"==typeof sigma)throw"sigma is not declared";if("undefined"==typeof ShapeLibrary)throw"ShapeLibrary is not declared";sigma.utils.pkg("sigma.canvas.nodes");var b=a,c={},d=function(a){b=a},e=function(a,d,e,f,g){if(b&&a.image&&a.image.url){var h=a.image.url,i=a.image.h||1,j=a.image.w||1,k=a.image.scale||1,l=a.image.clip||1,m=c[h];m||(m=document.createElement("IMG"),m.src=h,m.onload=function(){console.log("redraw on image load"),b.refresh()},c[h]=m);var n=i>j?j/i:1,o=j>i?i/j:1,p=f*k;g.save(),g.beginPath(),g.arc(d,e,f*l,0,2*Math.PI,!0),g.closePath(),g.clip(),g.drawImage(m,d+Math.sin(-0.7855)*p*n,e-Math.cos(-0.7855)*p*o,p*n*2*Math.sin(-0.7855)*-1,p*o*2*Math.cos(-0.7855)),g.restore()}},f=function(a,b,c){sigma.canvas.nodes[a]=function(a,d,f){var g=f("prefix")||"",h=a[g+"size"],i=a.color||f("defaultNodeColor"),j=a.borderColor||i,k=a[g+"x"],l=a[g+"y"];d.save(),b&&b(a,k,l,h,i,d),c&&c(a,k,l,h,j,d),e(a,k,l,h,d),d.restore()}};ShapeLibrary.enumerate().forEach(function(a){f(a.name,a.drawShape,a.drawBorder)}),this.CustomShapes={init:d,version:"0.1"}}.call(this);

4
census/www/sigma.min.js vendored Executable file

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,145 @@
var curr_clicked = null; // currently clicked node
var curr_cookies = null; // list of cookies held at currently clicked node
var highlighted = "ff0000"; // color to highlight node
var faded = "fffaf0"; // color for faded out nodes
// on hover: if a node is clicked, show the cookies it shares with the hovered node
function hover_node(n) {
// either we are not clicking on a node or we are hovering over that node
    // also, ignore nodes that are not currently highlighted
if (curr_clicked == null || n.data.node.id == curr_clicked || n.data.node.color != highlighted) {
return;
}
// try to find the common cookies
    var common_cookies = [];
curr_cookies.forEach(function (c) {
if (c in n.data.node.cookies) {
common_cookies.push(c);
}
});
common_cookies.sort();
console.log(common_cookies);
fill_cookie_data(n.data.node.id);
s.refresh();
}
function unhover_node(n) {
if (curr_clicked == null) {
return;
}
fill_cookie_data(null);
}
function click_stage(stage) {
reset_settings(stage);
s.refresh();
}
// sets the graph to its original coloring
function reset_settings(stage) {
s.graph.nodes().forEach(function(n) {
n.color = n.original_color;
});
s.graph.edges().forEach(function(e) {
e.color = e.original_color;
});
}
function click_node(e) {
if (e.data.node.id == curr_clicked) {
return;
}
color_flow(e);
fill_cookie_data(null);
}
// used for clicking, colors all nodes and edges that share a common cookie
// with the currently clicked node
function color_flow(e) {
// gets the cookies placed at this node
    var cookies = Object.keys(e.data.node.cookies);
curr_clicked = e.data.node.id;
curr_cookies = cookies;
// color all nodes that have a cookie shared with this node
    s.graph.nodes().forEach(function(n) {
        var shared = cookies.some(function(c) {
            return c in n.cookies;
        });
        n.color = shared ? highlighted : faded;
    });
// next, color the edges
    s.graph.edges().forEach(function(e) {
        var shared = cookies.some(function(c) {
            return c in e.cookies;
        });
        e.color = shared ? highlighted : faded;
    });
s.refresh();
}
function fill_cookie_data(hovered_node) {
if (hovered_node == null) {
$("#owners").html(s.graph.nodes(curr_clicked).label);
// in this case, we fill in all of the current cookies
owned_cookies = "";
curr_cookies.forEach(function(c) {
owned_cookies += c + "</br>";
});
$("#cookies").html(owned_cookies);
}
else {
console.log(s.graph.nodes(hovered_node).label);
$("#owners").html(s.graph.nodes(curr_clicked).label + " and " + s.graph.nodes(hovered_node).label);
owned_cookies = "";
curr_cookies.forEach(function(c) {
if (c in s.graph.nodes(hovered_node).cookies) {
owned_cookies += c + "</br>";
}
});
$("#cookies").html(owned_cookies);
}
}
function filter_out_low_weights(threshold_weight) {
// first fade out the low-weight nodes
s.graph.nodes().forEach(function(n) {
if (n.weight < threshold_weight) {
n.color = faded;
}
else {
n.color = highlighted;
}
});
// next, fade out the edges with at least one faded node
s.graph.edges().forEach(function(e) {
if (s.graph.nodes(e.source).color == faded
|| s.graph.nodes(e.target).color == faded) {
e.color = faded;
}
else {
e.color = highlighted;
}
});
s.refresh();
}

View File

@@ -0,0 +1,52 @@
var base_color = "ff0000"; // standard color for nodes
function init() {
// setup the graph
s = new sigma(document.getElementById('graph'));
sigma.parsers.json(
'graph.json',
s,
function() {
max_weight = 0; // max weight of a node
// save the original color of the graph for later re-coloring
// also, save the weights for each node and edge
s.graph.nodes().forEach(function(n) {
n.color = base_color;
n.original_color = n.color;
n.weight = Object.keys(n.cookies).length;
if (n.weight > max_weight) {
max_weight = n.weight;
}
});
s.graph.edges().forEach(function(e) {
e.color = base_color;
e.original_color = e.color;
e.weight = Object.keys(e.cookies).length;
});
s.refresh();
// set up the ui
$("#weight_slider").slider({
range: "max",
min: 0,
max: max_weight,
value: max_weight / 2,
slide: function(event, ui) {
filter_out_low_weights(ui.value);
}
});
});
// bind actions from graph_actions.js
s.bind('overNode', hover_node);
s.bind('outNode', unhover_node);
s.bind('clickStage', click_stage);
s.bind('clickNode', click_node);
}