* demo: Bump some React versions
Not using the latest versions because some things are broken, like props and react-scripts using TypeScript.
* demo: Add message about resetting account on error.
* demo/nb: Correct test to increase the gasUsed max.
* demo: Avoid some BN usage when deploying models. I'm not sure why this broke.
* demo/ncc: Handle sparse version with centroids as arrays.
This commit is contained in:
Justin D. Harris 2020-11-04 14:39:25 -05:00 committed by GitHub
Parent dfcbe24551
Commit 9919db8b96
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 4703 additions and 3483 deletions

View File

@ -86,11 +86,14 @@ yarn blockchain
```
Do once:
* Add http://localhost:7545 to MetaMask.
* Copy the first private key output.
* In your browser, you will need to add http://localhost:7545 as a custom RPC; you can use the MetaMask extension for this (see the sketch after this list).
* If required, you can set the chain ID to: 0xDeCA10B
* Copy the first private key output by the above command.
* Use that private key to create a new account in MetaMask.
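
For reference, a custom network entry only needs a few values; a sketch of what is used here (the network name label is arbitrary):

```
Network Name: Local Ganache (any label)
New RPC URL:  http://localhost:7545
Chain ID:     0xDeCA10B
```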
## Server (Optional)
## Server
This is used by default in development mode but not in production.
If you want to store meta-data in a local database file instead of just within the browser, then start the server in one terminal.
This step allows you to see models listed when you open the dashboard for the first time.

View File

@ -3,6 +3,7 @@
# Service Storage
# We do not want to enable the data storage service because we do not want to manage that data storage.
# Default is enabled.
# Disable the data store for a simple production environment.
REACT_APP_ENABLE_SERVICE_DATA_STORE=false
# Online Safety
@ -11,4 +12,5 @@ REACT_APP_ENABLE_SERVICE_DATA_STORE=false
# Configure in: client/src/safety/config.ts
# Default is disabled.
# Enable online safety for a simple production environment.
REACT_APP_ENABLE_ONLINE_SAFETY=true
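
As a reminder of how these flags reach the client: with create-react-app, `REACT_APP_*` variables are inlined into `process.env` as strings at build time, so code has to compare against the string `'true'`. A minimal sketch (the helper name is hypothetical, not from this repo):

```js
// Hypothetical helper; the actual checks in client/src may be written differently.
function isFlagEnabled(name) {
  // create-react-app exposes REACT_APP_* values as strings ("true"/"false"), never booleans.
  return process.env[name] === 'true'
}

const onlineSafetyEnabled = isFlagEnabled('REACT_APP_ENABLE_ONLINE_SAFETY')
console.log('Online safety:', onlineSafetyEnabled)
```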

View File

@ -1,4 +1,7 @@
# See descriptions in `.env`.
REACT_APP_ENABLE_SERVICE_DATA_STORE=false
# Use the data storage because it is helpful in development.
REACT_APP_ENABLE_SERVICE_DATA_STORE=true
# Disable online safety because it makes development more difficult.
REACT_APP_ENABLE_ONLINE_SAFETY=false

View File

@ -43,7 +43,7 @@ module.exports = async function (deployer) {
console.log(`Deploying IMDB model with ${weights.length} weights.`);
const intercept = convertNum(model.intercept || model.bias, web3, toFloat);
const learningRate = convertNum(0.5, web3, toFloat);
const learningRate = convertNum(model.learningRate, web3, toFloat);
console.log(`Deploying DataHandler.`);
return deployer.deploy(DataHandler64).then(dataHandler => {

View File

@ -1,6 +1,6 @@
{
"name": "decai-demo-client",
"version": "1.3.0",
"version": "1.4.0",
"license": "MIT",
"private": true,
"proxy": "http://localhost:5387/",
@ -14,8 +14,8 @@
"@types/jest": "^24.0.23",
"@types/murmurhash-js": "^1.0.3",
"@types/node": "^12.12.14",
"@types/react": "^16.9.14",
"@types/react-dom": "^16.9.4",
"@types/react": "^16.9.55",
"@types/react-dom": "^16.9.9",
"axios": "^0.19.0",
"blueimp-load-image": "^2.24.0",
"canvas": "^2.6.0",
@ -24,18 +24,17 @@
"moment": "^2.24.0",
"murmurhash-js": "^1.0.0",
"notistack": "^0.9.7",
"react": "^16.12.0",
"react-addons-update": "^15.6.2",
"react-dom": "^16.12.0",
"react": "^16.14.0",
"react-dom": "^16.14.0",
"react-dropzone": "^10.2.1",
"react-router-dom": "^5.1.2",
"react-scripts": "^3.2.0",
"react-scripts": "^3.4.4",
"react-spinners": "^0.6.1",
"serve": "^11.2.0",
"truffle": "^5.1.12",
"truffle": "^5.1.50",
"typescript": "^3.7.3",
"web3": "^1.2.6",
"web3-eth-contract": "^1.2.6"
"web3": "^1.3.0",
"web3-eth-contract": "^1.3.0"
},
"scripts": {
"start": "bash deploy_client.sh",

View File

@ -1097,8 +1097,8 @@ class Model extends React.Component {
}
})
.on('error', err => {
console.error(err);
this.notify("Error adding data. See the console for details.", { variant: 'error' })
console.error(err)
this.notify("Error adding data. See the console for details. If you were using a private network, it might help to \"Reset Account\" in your browser or extension (MetaMask).", { variant: 'error' })
});
});
}

View File

@ -16,7 +16,7 @@ import DeleteIcon from '@material-ui/icons/Delete';
import { withSnackbar } from 'notistack';
import PropTypes from 'prop-types';
import React from 'react';
import update from 'react-addons-update';
import update from 'immutability-helper';
import { checkStorages } from '../components/storageSelector';
import { getNetworkType } from '../getWeb3';
import { OnlineSafetyValidator } from '../safety/validator';
@ -237,7 +237,7 @@ class ModelList extends React.Component {
{serviceStorageEnabled ? " or if they are listed on a centralized database" : ""}.
</Typography>
<Typography component="p">
You can deploy your own model <Link href='/addModel'>here</Link> or use an already deployed model by filling in the information <Link href='/addDeployedModel'>here</Link>.
You can deploy your own model <Link href='/add'>here</Link> or use an already deployed model by filling in the information <Link href='/addDeployedModel'>here</Link>.
</Typography>
</div>
{this.state.loadingModels ?
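
For context on the swap from `react-addons-update` to `immutability-helper` above: the latter is the maintained replacement and exposes the same `update(object, spec)` API, so call sites keep working unchanged. A minimal sketch with a hypothetical state shape (not taken from this component):

```js
import update from 'immutability-helper'

// Hypothetical state used only to illustrate the spec syntax.
const state = { models: ['model A'], loadingModels: true }

// The same $push/$set/$merge commands that react-addons-update accepted.
const next = update(state, {
  models: { $push: ['model B'] },
  loadingModels: { $set: false },
})
// `state` is untouched; `next` is a new object with the changes applied.
```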

View File

@ -1,7 +1,9 @@
const _toFloat = 1E9;
export function convertNum(num, web3, toFloat = _toFloat) {
return web3.utils.toBN(Math.round(num * toFloat));
export function convertNum(num, _web3, toFloat = _toFloat) {
// We used to convert to BN here
// but it caused problems and inconsistencies when running tests vs. running in the browser (using MetaMask).
return Math.round(num * toFloat);
}
export function convertToHex(num, web3, toFloat = _toFloat) {
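
With the change above, `convertNum` now returns a plain JavaScript number (the value scaled by `toFloat` and rounded) rather than a BN, which keeps test assertions and in-browser (MetaMask) behaviour consistent. A quick sketch of the expected results (import path shown for illustration):

```js
import { convertNum } from './float-utils'

// The second argument (web3) is no longer used but is kept for call-site compatibility.
console.log(convertNum(0.5, undefined))   // 500000000 (a plain number, not a BN)
console.log(convertNum(-7.55, undefined)) // -7550000000
console.log(convertNum(2.1, undefined))   // 2100000000 (default toFloat is 1E9)
```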

View File

@ -2,35 +2,32 @@ import assert from 'assert'
import Web3 from 'web3'
import { convertNum } from '../../float-utils'
import { ModelDeployer } from '../deploy-model'
import { CentroidInfo, DensePerceptronModel, NaiveBayesModel, NearestCentroidModel, SparseCentroidInfo, SparseNearestCentroidModel, SparsePerceptronModel } from '../model-interfaces'
import { CentroidInfo, DensePerceptronModel, Model, NaiveBayesModel, NearestCentroidModel, SparseCentroidInfo, SparseNearestCentroidModel, SparsePerceptronModel } from '../model-interfaces'
declare const web3: Web3
function assertEqualNumbers(actual: any, expected: any, message?: string | Error): void {
if (message) {
message += "\n"
}
if (web3.utils.isBN(actual)) {
if (web3.utils.isBN(expected)) {
if (message === undefined) {
message = `actual: ${actual} (${typeof actual})\nexpected: ${expected} (${typeof expected})`
}
message = `${message || ""}actual: ${actual} (BN)\nexpected: ${expected} (BN)`
return assert(actual.eq(expected), message)
} else {
const expectedBN = web3.utils.toBN(expected)
if (message === undefined) {
message = `actual: ${actual} (${typeof actual})\nexpected: ${expected} (${typeof expected}) => BN: ${expectedBN}`
}
message = `${message || ""}actual: ${actual} (BN)\nexpected: ${expected} (${typeof expected}) => BN: ${expectedBN}`
return assert(actual.eq(expectedBN), message)
}
} else if (web3.utils.isBN(expected)) {
const actualBN = web3.utils.toBN(actual)
if (message === undefined) {
message = `actual: ${actual} (${typeof actual}) => BN: ${actualBN}\nexpected: ${expected} (${typeof expected})`
}
message = `${message || ""}actual: ${actual} (${typeof actual}) => BN: ${actualBN}\nexpected: ${expected} (BN)`
return assert(actualBN.eq(expected), message)
} else {
if (typeof actual === 'string') {
actual = parseInt(actual)
}
return assert.equal(actual, expected, message)
return assert.strictEqual(actual, expected, message)
}
}
@ -68,8 +65,9 @@ describe("ModelDeployer", () => {
account,
})
assertEqualNumbers(await m.methods.smoothingFactor().call(), convertNum(model.smoothingFactor, web3), "smoothingFactor")
for (let i = 0; i < model.classifications.length; ++i) {
assert.equal(await m.methods.classifications(i).call(), model.classifications[i])
assert.strictEqual(await m.methods.classifications(i).call(), model.classifications[i])
assertEqualNumbers(await m.methods.getNumSamples(i).call(), model.classCounts[i])
for (const [featureIndex, count] of model.featureCounts[i]) {
assertEqualNumbers(await m.methods.getFeatureCount(i, featureIndex).call(), count)
@ -83,8 +81,8 @@ describe("ModelDeployer", () => {
const model = new NearestCentroidModel(
'dense nearest centroid classifier',
{
"AA": new CentroidInfo([-1, -1], 2),
"BB": new CentroidInfo([+1, +1], 2),
"AA": new CentroidInfo([-1, -1, 4.88, -8.44, -3], 2),
"BB": new CentroidInfo([+1, -1.8, 9.07, 3, -3], 2),
}
)
const m = await deployer.deployModel(
@ -96,20 +94,21 @@ describe("ModelDeployer", () => {
let i = -1
for (let [classification, centroidInfo] of Object.entries(model.centroids)) {
++i
assert.equal(await m.methods.classifications(i).call(), classification)
assert.strictEqual(await m.methods.classifications(i).call(), classification)
assertEqualNumbers(await m.methods.getNumSamples(i).call(), centroidInfo.dataCount)
for (let j = 0; j < centroidInfo.centroid.length; ++j) {
assertEqualNumbers(await m.methods.getCentroidValue(i, j).call(), convertNum(centroidInfo.centroid[j], web3))
const actual = await m.methods.getCentroidValue(i, j).call()
assertEqualNumbers(actual, convertNum(centroidInfo.centroid[j], web3), `centroid value for class ${i}[${j}]`)
}
}
})
it("should deploy sparse Nearest Centroid", async () => {
// Values should all be positive since the representation is sparse.
const model = new SparseNearestCentroidModel(
'sparse nearest centroid classifier',
{
"AA": new SparseCentroidInfo({ '0': 0, '1': +1, '7': 1 }, 2),
// Values should all be positive since the representation is sparse.
"AA": new SparseCentroidInfo({ '0': 0, '1': +1, '7': 1.5, }, 2),
"BB": new SparseCentroidInfo({ '0': +1, '1': 0, '5': 0.5 }, 2),
}
)
@ -122,17 +121,50 @@ describe("ModelDeployer", () => {
let i = -1
for (let [classification, centroidInfo] of Object.entries(model.centroids)) {
++i
assert.equal(await m.methods.classifications(i).call(), classification)
assert.strictEqual(await m.methods.classifications(i).call(), classification)
assertEqualNumbers(await m.methods.getNumSamples(i).call(), centroidInfo.dataCount)
for (const [featureIndex, value] of Object.entries(centroidInfo.centroid)) {
assertEqualNumbers(await m.methods.getCentroidValue(i, featureIndex).call(), convertNum(value, web3))
assertEqualNumbers(await m.methods.getCentroidValue(i, featureIndex).call(), convertNum(value, web3), `centroid value for class ${i}[${featureIndex}]`)
}
}
})
it("should deploy sparse Nearest Centroid with array centroids", async () => {
// This shouldn't happen but it could if a model gets exported from the Python code
// and the type is set correctly.
const model = {
type: 'sparse nearest centroid classifier',
centroids: {
"AA": {
centroid: [0, 1, 1.5, 2, 87.88],
dataCount: 2
},
"BB": {
centroid: [1, 0, 0.5, 3.787],
dataCount: 2
},
}
}
const m = await deployer.deployModel(
model as Model,
{
account,
})
let i = -1
for (let [classification, centroidInfo] of Object.entries(model.centroids)) {
++i
assert.strictEqual(await m.methods.classifications(i).call(), classification)
assertEqualNumbers(await m.methods.getNumSamples(i).call(), centroidInfo.dataCount)
for (const [featureIndex, value] of Object.entries(centroidInfo.centroid)) {
assertEqualNumbers(await m.methods.getCentroidValue(i, featureIndex).call(), convertNum(value, web3), `centroid value for class ${i}[${featureIndex}]`)
}
}
})
it("should deploy dense Perceptron", async () => {
const classifications = ["A", "B"]
const weights = [1, -1]
const weights = [1, -1, 2.33, -8.66]
const intercept = 0
const m = await deployer.deployModel(
new DensePerceptronModel(
@ -146,7 +178,7 @@ describe("ModelDeployer", () => {
})
for (let i = 0; i < classifications.length; ++i) {
assert.equal(await m.methods.classifications(i).call(), classifications[i])
assert.strictEqual(await m.methods.classifications(i).call(), classifications[i])
}
for (let i = 0; i < weights.length; ++i) {
assertEqualNumbers(await m.methods.weights(i).call(), convertNum(weights[i], web3))
@ -156,8 +188,8 @@ describe("ModelDeployer", () => {
it("should deploy sparse Perceptron", async () => {
const classifications = ["AA", "BB"]
const weights = [2, -2]
const sparseWeights = { '4': 7, '11': 8, }
const weights = [2, -2, 2.44, -7.55, 0.537080412, 2000, -23232.32]
const sparseWeights = null
const intercept = 3
const m = await deployer.deployModel(
new SparsePerceptronModel(
@ -171,14 +203,52 @@ describe("ModelDeployer", () => {
})
for (let i = 0; i < classifications.length; ++i) {
assert.equal(await m.methods.classifications(i).call(), classifications[i])
assert.strictEqual(await m.methods.classifications(i).call(), classifications[i])
}
assertEqualNumbers(await m.methods.intercept().call(), convertNum(intercept, web3), "intercept:")
assertEqualNumbers(await m.methods.learningRate().call(), convertNum(0.5, web3), "learningRate:")
for (let i = 0; i < weights.length; ++i) {
assertEqualNumbers(await m.methods.weights(i).call(), convertNum(weights[i], web3))
assertEqualNumbers(await m.methods.weights(i).call(), convertNum(weights[i], web3), `weight ${i}:`)
}
for (const [featureIndex, weight] of Object.entries(sparseWeights)) {
assertEqualNumbers(await m.methods.weights(featureIndex).call(), convertNum(weight, web3))
})
it("should deploy sparse Perceptron - with sparseWeights", async () => {
const classifications = ["AA", "BB"]
const weights = [2, -2, 2.44, -7.55, -3]
const sparseWeights = { '8': 7, '11': 8, '12': 8.21, '15': -4.55, '17': -3 }
const intercept = 3
const m = await deployer.deployModel(
new SparsePerceptronModel(
'sparse perceptron',
classifications,
weights, sparseWeights,
intercept,
),
{
account,
// notify: console.debug,
})
for (let i = 0; i < classifications.length; ++i) {
assert.strictEqual(await m.methods.classifications(i).call(), classifications[i])
}
assertEqualNumbers(await m.methods.intercept().call(), convertNum(intercept, web3), "intercept:")
assertEqualNumbers(await m.methods.learningRate().call(), convertNum(0.5, web3), "learningRate:")
for (let i = 0; i < weights.length; ++i) {
const actual = await m.methods.weights(i).call()
assertEqualNumbers(actual, convertNum(weights[i], web3), `weight ${i}:`)
}
for (const [featureIndexKey, weight] of Object.entries(sparseWeights)) {
const featureIndex = parseInt(featureIndexKey, 10)
const actual = await m.methods.weights(featureIndex).call()
assertEqualNumbers(actual, convertNum(weight, web3), `sparseWeight ${featureIndex}:`)
}
assertEqualNumbers(await m.methods.intercept().call(), convertNum(intercept, web3))
})
})

View File

@ -17,6 +17,12 @@ async function deployDensePerceptron(model, web3, toFloat) {
const weights = convertData(model.weights, web3, toFloat)
const intercept = convertNum(model.intercept || model.bias, web3, toFloat)
const learningRate = convertNum(model.learningRate || 1, web3, toFloat)
// TODO Handle feature indices.
if (model.featureIndices) {
throw new Error("featureIndices are not supported yet.")
}
console.log(` Deploying Dense Perceptron classifier with first ${Math.min(weights.length, weightChunkSize)} weights.`)
const classifierContract = await DensePerceptron.new(classifications, weights.slice(0, weightChunkSize), intercept, learningRate)
gasUsed += (await web3.eth.getTransactionReceipt(classifierContract.transactionHash)).gasUsed
@ -43,6 +49,20 @@ async function deploySparsePerceptron(model, web3, toFloat) {
const weights = convertData(model.weights, web3, toFloat)
const intercept = convertNum(model.intercept || model.bias, web3, toFloat)
const learningRate = convertNum(model.learningRate || 1, web3, toFloat)
const sparseWeights = []
// TODO Handle feature indices.
if (model.featureIndices) {
throw new Error("featureIndices are not supported yet.")
}
if (typeof model.sparseWeights === 'object') {
for (let [featureIndexKey, weight] of Object.entries(model.sparseWeights)) {
const featureIndex = parseInt(featureIndexKey, 10)
sparseWeights.push([featureIndex, convertNum(weight, web3, toFloat)])
}
}
console.log(` Deploying Sparse Perceptron classifier with first ${Math.min(weights.length, weightChunkSize)} weights...`)
const classifierContract = await SparsePerceptron.new(classifications, weights.slice(0, weightChunkSize), intercept, learningRate)
let gasUsed = (await web3.eth.getTransactionReceipt(classifierContract.transactionHash)).gasUsed
@ -55,6 +75,14 @@ async function deploySparsePerceptron(model, web3, toFloat) {
gasUsed += r.receipt.gasUsed
}
const sparseWeightsChunkSize = Math.round(weightChunkSize / 2)
for (let i = 0; i < sparseWeights.length; i += sparseWeightsChunkSize) {
const r = await classifierContract.initializeSparseWeights(
sparseWeights.slice(i, i + sparseWeightsChunkSize))
console.debug(` Added sparse classifier weights [${i},${Math.min(i + sparseWeightsChunkSize, sparseWeights.length)}) out of ${sparseWeights.length}. gasUsed: ${r.receipt.gasUsed}`)
gasUsed += r.receipt.gasUsed
}
console.log(` Deployed Sparse Perceptron classifier to ${classifierContract.address}. gasUsed: ${gasUsed}`)
return {
@ -205,11 +233,14 @@ async function deployNaiveBayes(model, web3, toFloat) {
}
/**
* @param model A model object or a string for the path to a JSON model file.
* @returns The contract for the model, an instance of `Classifier64`
* along with the total amount of gas used to deploy the model.
*/
exports.deployModel = async function (path, web3, toFloat = _toFloat) {
const model = JSON.parse(fs.readFileSync(path, 'utf8'))
exports.deployModel = async function (model, web3, toFloat = _toFloat) {
if (typeof model === 'string') {
model = JSON.parse(fs.readFileSync(model, 'utf8'))
}
switch (model.type) {
case 'dense perceptron':
return deployDensePerceptron(model, web3, toFloat)
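
With this change, `deployModel` accepts either an in-memory model object or a path to a JSON model file. A minimal sketch of both call styles (the file path and surrounding setup are illustrative):

```js
const { deployModel } = require('../../../src/ml-models/deploy-model-node')

async function deployExamples(web3) {
  // 1. Pass a model object directly (as the new Perceptron test later in this change does).
  const { classifierContract } = await deployModel({
    type: 'sparse perceptron',
    classifications: ['AA', 'BB'],
    weights: [0, 5, -1],
    sparseWeights: { '8': 9.1 },
    intercept: 2,
    learningRate: 0.5,
  }, web3)
  console.log(`Deployed to ${classifierContract.address}.`)

  // 2. Or pass a path to an exported JSON model file (illustrative path).
  await deployModel('./saved-model.json', web3)
}
```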

View File

@ -5,7 +5,7 @@ import NaiveBayesClassifier from '../contracts/compiled/NaiveBayesClassifier.jso
import NearestCentroidClassifier from '../contracts/compiled/NearestCentroidClassifier.json'
import SparseNearestCentroidClassifier from '../contracts/compiled/SparseNearestCentroidClassifier.json'
import SparsePerceptron from '../contracts/compiled/SparsePerceptron.json'
import { convertDataToHex, convertToHex } from '../float-utils'
import { convertData, convertNum } from '../float-utils'
import { DensePerceptronModel, Model, NaiveBayesModel, NearestCentroidModel, SparseNearestCentroidModel, SparsePerceptronModel } from './model-interfaces'
export class ModelDeployer {
@ -42,7 +42,7 @@ export class ModelDeployer {
const initialFeatureChunkSize = 150
const featureChunkSize = 350
const { classifications, classCounts, featureCounts, totalNumFeatures } = model
const smoothingFactor = convertToHex(model.smoothingFactor || defaultSmoothingFactor, this.web3, toFloat)
const smoothingFactor = convertNum(model.smoothingFactor || defaultSmoothingFactor, this.web3, toFloat)
const ContractInfo = ModelDeployer.modelTypes[model.type]
const contract = new this.web3.eth.Contract(ContractInfo.abi, undefined, { from: account })
@ -119,8 +119,8 @@ export class ModelDeployer {
for (let [classification, centroidInfo] of Object.entries(model.centroids)) {
classifications.push(classification)
dataCounts.push(centroidInfo.dataCount)
if (Array.isArray(centroidInfo.centroid)) {
centroids.push(convertDataToHex(centroidInfo.centroid, this.web3, toFloat))
if (Array.isArray(centroidInfo.centroid) && model.type !== 'sparse nearest centroid classifier') {
centroids.push(convertData(centroidInfo.centroid, this.web3, toFloat))
if (numDimensions === null) {
numDimensions = centroidInfo.centroid.length
} else {
@ -130,9 +130,10 @@ export class ModelDeployer {
}
} else {
const sparseCentroid: number[][] = []
// `centroidInfo.centroid` could be an array or dict.
for (let [featureIndexKey, value] of Object.entries(centroidInfo.centroid)) {
const featureIndex = parseInt(featureIndexKey)
sparseCentroid.push([this.web3.utils.toHex(featureIndex), convertToHex(value, this.web3, toFloat)])
sparseCentroid.push([featureIndex, convertNum(value, this.web3, toFloat)])
}
centroids.push(sparseCentroid as any)
}
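
The branch above is what lets the sparse nearest centroid deployer accept both shapes of `centroidInfo.centroid`. A sketch of the two model layouts it now handles (values illustrative):

```js
// Dict form: the usual sparse export, keyed by feature index.
const sparseCentroids = {
  "AA": { centroid: { '0': 0, '1': 1, '7': 1.5 }, dataCount: 2 },
}

// Array form: what a Python export may produce even when the type says sparse;
// feature indices are implied by position.
const arrayCentroids = {
  "AA": { centroid: [0, 1, 1.5, 2, 87.88], dataCount: 2 },
}
```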
@ -179,14 +180,14 @@ export class ModelDeployer {
// Extend each class.
for (let classification = 0; classification < classifications.length; ++classification) {
for (let j = initialChunkSize; j < centroids[classification].length; j += chunkSize) {
const notification = notify(`Please accept the prompt to upload the values for dimensions [${j},${j + chunkSize}) for the "${classifications[classification]}" class`)
const notification = notify(`Please accept the prompt to upload the values for dimensions [${j},${Math.min(j + chunkSize, centroids[classification].length)}) for the "${classifications[classification]}" class`)
// Not parallel since order matters.
await newContractInstance.methods.extendCentroid(
centroids[classification].slice(j, j + chunkSize), classification).send().on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting feature indices for [${j},${j + chunkSize}) for the "${classifications[classification]}" class`, { variant: 'error' })
notify(`Error setting feature indices for [${j},${Math.min(j + chunkSize, centroids[classification].length)}) for the "${classifications[classification]}" class`, { variant: 'error' })
throw err
})
}
@ -214,16 +215,16 @@ export class ModelDeployer {
if (typeof sparseModel.sparseWeights === 'object' && sparseModel.sparseWeights !== null) {
for (let [featureIndexKey, weight] of Object.entries(sparseModel.sparseWeights)) {
const featureIndex = parseInt(featureIndexKey, 10)
sparseWeights.push([this.web3.utils.toHex(featureIndex), convertToHex(weight, this.web3, toFloat)])
sparseWeights.push([featureIndex, convertNum(weight, this.web3, toFloat)])
}
}
}
if (model.weights !== undefined && model.weights !== null && Array.isArray(model.weights)) {
weightsArray = convertDataToHex(model.weights, this.web3, toFloat)
weightsArray = convertData(model.weights, this.web3, toFloat)
}
const intercept = convertToHex(model.intercept, this.web3, toFloat)
const learningRate = convertToHex(model.learningRate || defaultLearningRate, this.web3, toFloat)
const intercept = convertNum(model.intercept, this.web3, toFloat)
const learningRate = convertNum(model.learningRate || defaultLearningRate, this.web3, toFloat)
if (featureIndices !== undefined && featureIndices.length !== weightsArray.length + sparseWeights.length) {
return Promise.reject("The number of features must match the number of weights.")
@ -262,7 +263,10 @@ export class ModelDeployer {
// Subtract 1 from the count because the first chunk has already been uploaded.
const notification = notify(`Please accept the prompt to upload classifier
weights [${i},${Math.min(i + weightChunkSize, weightsArray.length)}) (${i / weightChunkSize}/${Math.ceil(weightsArray.length / weightChunkSize) - 1})`)
await transaction.send().on('transactionHash', () => {
await transaction.send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
@ -274,27 +278,32 @@ export class ModelDeployer {
// Add feature indices to use.
for (let i = 0; i < featureIndices.length; i += weightChunkSize) {
const notification = notify(`Please accept the prompt to upload the feature indices [${i},${Math.min(i + weightChunkSize, featureIndices.length)})`)
await newContractInstance.methods.addFeatureIndices(featureIndices.slice(i, i + weightChunkSize)).send()
.on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting feature indices for [${i},${Math.min(i + weightChunkSize, featureIndices.length)})`, { variant: 'error' })
console.error(err)
})
}
}
for (let i = 0; i < sparseWeights.length; i += Math.round(weightChunkSize / 2)) {
const notification = notify(`Please accept the prompt to upload sparse classifier weights [${i},${i + Math.round(weightChunkSize / 2)}) out of ${sparseWeights.length}`)
await newContractInstance.methods.initializeSparseWeights(
sparseWeights.slice(i, i + Math.round(weightChunkSize / 2))).send().on('transactionHash', () => {
await newContractInstance.methods.addFeatureIndices(featureIndices.slice(i, i + weightChunkSize)).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting sparse classifier weights [${i},${i + Math.round(weightChunkSize / 2)}) out of ${sparseWeights.length}`, { variant: 'error' })
throw err
notify(`Error setting feature indices for [${i},${Math.min(i + weightChunkSize, featureIndices.length)})`, { variant: 'error' })
console.error(err)
})
}
}
const sparseWeightsChunkSize = Math.round(weightChunkSize / 2)
for (let i = 0; i < sparseWeights.length; i += sparseWeightsChunkSize) {
const notification = notify(`Please accept the prompt to upload sparse classifier weights [${i},${Math.min(i + sparseWeightsChunkSize, sparseWeights.length)}) out of ${sparseWeights.length}`)
await newContractInstance.methods.initializeSparseWeights(sparseWeights.slice(i, i + sparseWeightsChunkSize)).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting sparse classifier weights [${i},${Math.min(i + sparseWeightsChunkSize, sparseWeights.length)}) out of ${sparseWeights.length}`, { variant: 'error' })
throw err
})
}
notify(`The model contract has been deployed to ${newContractInstance.options.address}`, { variant: 'success' })

File diff suppressed because one or more lines are too long

View File

@ -143,7 +143,7 @@ export class SparsePerceptronModel extends Model {
type: 'sparse perceptron',
public classifications: string[],
public weights: number[],
public sparseWeights: { [featureIndex: string]: number },
public sparseWeights: { [featureIndex: string]: number } | null,
public intercept: number,
public learningRate?: number,
public featureIndices?: number[],
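
Since `sparseWeights` can now be `null`, a caller can construct a `SparsePerceptronModel` without any sparse weights; a minimal sketch mirroring the new test above (import path for illustration):

```js
import { SparsePerceptronModel } from './model-interfaces'

// Dense weights only; pass null when there are no sparse weights to initialize.
const model = new SparsePerceptronModel(
  'sparse perceptron',
  ['AA', 'BB'],   // classifications
  [2, -2, 2.44],  // weights
  null,           // sparseWeights
  3,              // intercept
)
```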

View File

@ -107,7 +107,7 @@ contract('NaiveBayesClassifier', function (accounts) {
const updateResponse = await classifier.update(data, classification);
// To help with optimizing gas usage:
// console.log(` update gasUsed: ${updateResponse.receipt.gasUsed}`);
assert.isBelow(updateResponse.receipt.gasUsed, 106424 + 1, "Too much gas used.");
assert.isBelow(updateResponse.receipt.gasUsed, 113704 + 1, "Too much gas used.");
for (let i in prevFeatureCounts) {
const featureIndex = data[i];

View File

@ -4,6 +4,7 @@ const Classifier = artifacts.require("./classification/SparsePerceptron");
const Stakeable64 = artifacts.require("./incentive/Stakeable64");
const { convertData, convertNum } = require('../../../src/float-utils-node');
const { deployModel } = require('../../../src/ml-models/deploy-model-node')
contract('CollaborativeTrainer with Perceptron', function (accounts) {
const toFloat = 1E9;
@ -262,4 +263,25 @@ contract('CollaborativeTrainer with Perceptron', function (accounts) {
});
});
});
it("...should initializeSparseWeights", async function () {
const model = {
type: 'sparse perceptron',
classifications,
weights: [0, 5, -1, 2.1, -1.4],
sparseWeights: { '8': 9.1, '12': -7.3 },
intercept: 2,
learningRate: 0.5,
}
const { classifierContract } = await deployModel(model, web3, toFloat)
assert.equal(await classifierContract.intercept().then(parseFloatBN), model.intercept)
assert.equal(await classifierContract.learningRate().then(parseFloatBN), model.learningRate)
for (let i = 0; i < model.weights.length; ++i) {
assert.equal(await classifierContract.weights(i).then(parseFloatBN), model.weights[i])
}
for (const [featureIndex, weight] of Object.entries(model.sparseWeights)) {
assert.equal(await classifierContract.weights(parseInt(featureIndex, 10)).then(parseFloatBN), weight)
}
});
});

Diff not shown because of its large size.

View File

@ -1,10 +1,10 @@
{
"name": "decai-demo",
"version": "1.3.0",
"version": "1.4.0",
"license": "MIT",
"private": true,
"scripts": {
"blockchain": "mkdir --parents blockchain_db && ganache-cli --db blockchain_db --networkId 5782 --gasLimit 9000000 --gasPrice 1 --host 0.0.0.0 --port ${PORT:-7545} --mnemonic \"net pistol fun vibrant sausage vintage general silk weekend street brave rubber\"",
"blockchain": "mkdir --parents blockchain_db && ganache-cli --db blockchain_db --networkId 5782 --chainId 0xDeCA10B --gasLimit 9000000 --gasPrice 1 --host 0.0.0.0 --port ${PORT:-7545} --mnemonic \"net pistol fun vibrant sausage vintage general silk weekend street brave rubber\"",
"clean": "rm -rf blockchain_db db.sqlite client/src/contracts/compiled/*.json",
"client": "cd client && yarn start",
"prod-client": "cd client && yarn start-prod",
@ -16,7 +16,7 @@
"dependencies": {
"body-parser": "^1.18.3",
"express": "^4.16.4",
"ganache-cli": "6.5.1",
"ganache-cli": "^6.12.1",
"sql.js": "^1.1.0"
},
"devDependencies": {

Diff not shown because of its large size.