Mirror of https://github.com/microsoft/landcover.git
Moved to point corrections in web tool, fixed typo in main.js
Parent: 87dfc67fe6
Commit: f99f023d77
@@ -13,7 +13,6 @@ LOGGER = logging.getLogger("server")
 from . import ROOT_DIR
 from .DataLoader import DataLoaderCustom, DataLoaderUSALayer, DataLoaderBasemap
 
-_DATASET_FN = "datasets.json"
 
 def get_area_from_geometry(geom, src_crs="epsg:4326"):
     if geom["type"] == "Polygon":
@@ -89,9 +88,9 @@ def _load_dataset(dataset)
     }
 
 def load_datasets():
-    dataset_json = json.load(open(os.path.join(ROOT_DIR, _DATASET_FN),"r"))
     datasets = dict()
 
+    dataset_json = json.load(open(os.path.join(ROOT_DIR, "datasets.json"),"r"))
     for key, dataset in dataset_json.items():
         dataset_object = _load_dataset(dataset)
 
@@ -100,4 +99,18 @@ def load_datasets():
         else:
             datasets[key] = dataset_object
 
+    if os.path.exists(os.path.join(ROOT_DIR, "datasets.mine.json")):
+        dataset_json = json.load(open(os.path.join(ROOT_DIR, "datasets.mine.json"),"r"))
+        for key, dataset in dataset_json.items():
+
+            if key not in datasets:
+                dataset_object = _load_dataset(dataset)
+
+                if dataset_object is False:
+                    LOGGER.warning("Files are missing, we will not be able to serve the following dataset: '%s'" % (key))
+                else:
+                    datasets[key] = dataset_object
+            else:
+                LOGGER.warning("There is a conflicting dataset key in datasets.mine.json, skipping.")
+
     return datasets
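
Note on the two hunks above: load_datasets() now reads datasets.json directly and then overlays an optional datasets.mine.json, keeping an override entry only when its key is not already taken and logging a warning otherwise. Below is a minimal, self-contained sketch of that merge rule; merge_with_overrides and the toy entries are illustrative names, not code from the repository, and the real function passes each entry through _load_dataset() rather than keeping raw JSON.

import json
import logging
import os
import tempfile

LOGGER = logging.getLogger("server")

def merge_with_overrides(root_dir, base_fn, override_fn):
    # Load the base file, then add override entries only for keys that are
    # not already present; conflicting keys are skipped with a warning.
    with open(os.path.join(root_dir, base_fn), "r") as f:
        merged = json.load(f)

    override_path = os.path.join(root_dir, override_fn)
    if os.path.exists(override_path):
        with open(override_path, "r") as f:
            overrides = json.load(f)
        for key, value in overrides.items():
            if key not in merged:
                merged[key] = value
            else:
                LOGGER.warning("There is a conflicting key in %s, skipping." % override_fn)
    return merged

if __name__ == "__main__":
    # Throwaway demonstration files; keys and payloads are made up.
    with tempfile.TemporaryDirectory() as root_dir:
        with open(os.path.join(root_dir, "datasets.json"), "w") as f:
            json.dump({"base_layer": {"note": "from datasets.json"}}, f)
        with open(os.path.join(root_dir, "datasets.mine.json"), "w") as f:
            json.dump({"base_layer": {"note": "ignored"}, "my_layer": {"note": "added"}}, f)
        print(merge_with_overrides(root_dir, "datasets.json", "datasets.mine.json"))
        # {'base_layer': {'note': 'from datasets.json'}, 'my_layer': {'note': 'added'}}
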
@@ -6,8 +6,6 @@ LOGGER = logging.getLogger("server")
 
 from . import ROOT_DIR
 
-_MODEL_FN = "models.json"
-
 
 def _load_model(model):
     if not os.path.exists(model["model"]["fn"]):
@@ -18,9 +16,9 @@ def _load_model(model):
     }
 
 def load_models():
-    model_json = json.load(open(os.path.join(ROOT_DIR,_MODEL_FN),"r"))
     models = dict()
 
 
+    model_json = json.load(open(os.path.join(ROOT_DIR,"models.json"),"r"))
     for key, model in model_json.items():
         model_object = _load_model(model)
@@ -29,4 +27,19 @@ def load_models():
         else:
             models[key] = model_object
 
+
+    if os.path.exists(os.path.join(ROOT_DIR, "models.mine.json")):
+        model_json = json.load(open(os.path.join(ROOT_DIR,"models.mine.json"),"r"))
+        for key, model in model_json.items():
+
+            if key not in models:
+                model_object = _load_model(model)
+
+                if model_object is False:
+                    LOGGER.warning("Files are missing, we will not be able to serve the following model: '%s'" % (key))
+                else:
+                    models[key] = model_object
+            else:
+                LOGGER.warning("There is a conflicting dataset key in models.mine.json, skipping.")
+
     return models
@@ -35,7 +35,7 @@ class ModelRPC(BackendModel):
     def retrain(self):
        return self.connection.root.exposed_retrain()
     def add_sample_point(self, row, col, class_idx):
-        return self.connection.root.exposed_add_sample(row, col, class_idx)
+        return self.connection.root.exposed_add_sample_point(row, col, class_idx)
     def undo(self):
         return self.connection.root.exposed_undo()
     def reset(self):
@@ -104,7 +104,7 @@ class TorchFineTuning(BackendModel):
         return output
 
     def retrain(self, **kwargs):
-        x_train = np.stack(self.corr_features)
+        x_train = np.array(self.corr_features)
         y_train = np.array(self.corr_labels)
 
 
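
Note on the hunk above: for a non-empty list of equal-length 1-D feature vectors, np.array and np.stack build the same 2-D x_train, so the swap does not change the shape of the training matrix; one place they differ is the empty list, where np.stack raises and np.array returns an empty array. The sketch below only illustrates that behaviour; the feature length of 4 is arbitrary, and self.corr_features is stood in by a local list.

import numpy as np

# Stand-in for self.corr_features: a list of per-pixel feature vectors.
corr_features = [np.arange(4, dtype=np.float32) + i for i in range(3)]

stacked = np.stack(corr_features)   # shape (3, 4)
arrayed = np.array(corr_features)   # shape (3, 4), same contents
assert np.array_equal(stacked, arrayed)

# Edge case: with no corrections collected yet, np.stack([]) raises a
# ValueError, while np.array([]) quietly returns an empty array of shape (0,).
try:
    np.stack([])
except ValueError as err:
    print("np.stack:", err)
print("np.array:", np.array([]).shape)
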
@@ -37,6 +37,27 @@ var DATASETS = (function () {
     return json;
 })();
 
+
+(function () {
+    var json = Object();
+    $.ajax({
+        'async': false,
+        'url': 'datasets.mine.json',
+        'dataType': "json",
+        'success': function(data){
+            json = data;
+        }
+    });
+
+    for(k in json){
+        if(!(k in DATASETS)){
+            DATASETS[k] = json[k];
+        }else{
+            console.debug("Skipping a duplicate key in datasets.mine.json");
+        }
+    }
+})();
+
 var MODELS = (function () {
     var json = null;
     $.ajax({
@@ -50,6 +71,26 @@ var MODELS = (function () {
     return json;
 })();
 
+(function () {
+    var json = Object();
+    $.ajax({
+        'async': false,
+        'url': 'models.mine.json',
+        'dataType': "json",
+        'success': function(data){
+            json = data;
+        }
+    });
+
+    for(k in json){
+        if(!(k in MODELS)){
+            MODELS[k] = json[k];
+        }else{
+            console.debug("Skipping a duplicate key in models.mine.json");
+        }
+    }
+})();
+
 var CLASSES = [];
 
 //GUI elements
@@ -295,7 +295,7 @@ var requestPatch = function(idx, polygon, currentImgIdx, serviceURL){
         "xmin": topleftProjected.x,
         "ymax": topleftProjected.y,
         "ymin": bottomrightProjected.y,
-        "crs": "espg:3857"
+        "crs": "epsg:3857"
     },
     "classes": CLASSES,
     "SESSION": SESSION_ID
@@ -204,14 +204,38 @@
 }
 
 $(document).ready(function () {
-    var data1 = $.getJSON('datasets.json');
-    var data2 = $.getJSON('models.json');
-    Promise.all([data1, data2]).then(function (results) {
-        datasets = results[0];
-        models = results[1];
+    var data = [
+        $.getJSON('datasets.json'),
+        $.getJSON('models.json'),
+        $.getJSON('datasets.mine.json'),
+        $.getJSON('models.mine.json')
+    ];
+
+    Promise.allSettled(data).then(function (results) {
+        datasets = results[0].value;
+        models = results[1].value
+
+        if(results[2].status == "fulfilled"){
+            for(k in results[2].value){
+                if(!(k in datasets)){
+                    datasets[k] = results[2].value[k];
+                }else{
+                    console.debug("Skipping a duplicate key in datasets.mine.json");
+                }
+            }
+        }
+        if(results[3].status == "fulfilled"){
+            for(k in results[3].value){
+                if(!(k in models)){
+                    models[k] = results[3].value[k];
+                }else{
+                    console.debug("Skipping a duplicate key in models.mine.json");
+                }
+            }
+        }
         main();
     });
 
 });
 </script>
 
@@ -41,8 +41,8 @@ class MyService(rpyc.Service):
     def exposed_retrain(self):
         return self.model.retrain()
 
-    def exposed_add_sample(self, tdst_row, bdst_row, tdst_col, bdst_col, class_idx):
-        return self.model.add_sample(tdst_row, bdst_row, tdst_col, bdst_col, class_idx)
+    def exposed_add_sample_point(self, row, col, class_idx):
+        return self.model.add_sample_point(row, col, class_idx)
 
     def exposed_undo(self):
         return self.model.undo()
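
Note on the hunk above: the rpyc service now exposes exposed_add_sample_point, matching the ModelRPC.add_sample_point call earlier in this diff. The round trip below is a minimal sketch under assumed names: this DummyModel, the toy MyService constructor, the port number, and the threading setup are placeholders rather than the repository's real classes or configuration; only the exposed_ naming and the connection.root call follow standard rpyc usage as shown in the diff.

import threading

import rpyc
from rpyc.utils.server import ThreadedServer

class DummyModel(object):
    # Placeholder backend model that just records point corrections.
    def __init__(self):
        self.corr_points = []

    def add_sample_point(self, row, col, class_idx):
        self.corr_points.append((row, col, class_idx))
        return len(self.corr_points)

class MyService(rpyc.Service):
    def __init__(self, model):
        self.model = model

    def exposed_add_sample_point(self, row, col, class_idx):
        return self.model.add_sample_point(row, col, class_idx)

if __name__ == "__main__":
    server = ThreadedServer(MyService(DummyModel()), port=18861)  # placeholder port
    threading.Thread(target=server.start, daemon=True).start()

    # Client side, as ModelRPC does it: call the exposed_* method on conn.root.
    conn = rpyc.connect("localhost", 18861)
    print(conn.root.exposed_add_sample_point(10, 20, 3))  # -> 1
    conn.close()
    server.close()
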