diff --git a/.Rbuildignore b/.Rbuildignore new file mode 100644 index 00000000..112ad268 --- /dev/null +++ b/.Rbuildignore @@ -0,0 +1,3 @@ +^.*\.Rproj$ +^\.Rproj\.user$ +^\.travis\.yml$ diff --git a/.gitignore b/.gitignore index cc783686..9433b675 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,177 @@ +# Compiled Object files +*.slo +*.lo +*.o +*.obj +*.gch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Co + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app + +obj/* + +## Core latex/pdflatex auxiliary files: +*.aux +*.lof +*.log +*.lot +*.fls +*.out +*.toc + +## Intermediate documents: +*.dvi +# these rules might exclude image files for figures etc. +# *.ps +# *.eps +# *.pdf + +## Bibliography auxiliary files (bibtex/biblatex/biber): +*.bbl +*.bcf +*.blg +*-blx.aux +*-blx.bib +*.run.xml + +## Build tool auxiliary files: +*.fdb_latexmk +*.synctex.gz +*.synctex.gz(busy) +*.pdfsync + +## Auxiliary and intermediate files from other packages: + +# algorithms +*.alg +*.loa + +# amsthm +*.thm + +# beamer +*.nav +*.snm +*.vrb + +# glossaries +*.acn +*.acr +*.glg +*.glo +*.gls + +# hyperref +*.brf + +# listings +*.lol + +# makeidx +*.idx +*.ilg +*.ind +*.ist + +# minitoc +*.maf +*.mtc +*.mtc0 + +# minted +*.pyg + +# nomencl +*.nlo + +# sagetex +*.sagetex.sage +*.sagetex.py +*.sagetex.scmd + +# sympy +*.sout +*.sympy +sympy-plots-for-*.tex/ + +# todonotes +*.tdo + +# xindy +*.xdy + +*.so +*.o +*.a + +# R history files etc. +.Rbuildignore +.Rhistory +.Rproj.user +.RData +..Rcheck +*.Rproj + +# Vim files +*~ +*.swp + +# +Debug +.cproject +.project +.settings +..Rcheck/ + +# file with main for tests +src/main.cpp +test/cpp/svm/svm_test_model.txt +test/cpp/svm/libsvm_test + +# log file +tests/cpp/cec/extra/valgrind/*.txt + +# Autogenerated by testthat +tests/testthat/Rplots.pdf + +# Test outputs +tests/testthat/trans_predictions +trans_predictions + +# svm big data +inst/data_sets/svm/colon-cancer +inst/data_sets/svm + +.depend/ +build/ *.so *.o +*.d +*.bin +*~ +data/ +/TestParts/ +/TestIntegrationFix/ + +.idea/ + +# compiled samples +demo/samples/*.html + +# misc +*.log diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..12d4ca91 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,31 @@ +# Sample .travis.yml for R projects. +# +# see the wiki: +# https://github.com/craigcitro/r-travis/wiki + +language: c + +sudo: required + +before_install: + - curl -OL http://raw.github.com/craigcitro/r-travis/master/scripts/travis-tool.sh + - chmod 755 ./travis-tool.sh + - ./travis-tool.sh bootstrap + # Test datasets + - cd inst/dev/datasets/svm/ && ./download_large.sh && cd ../../../.. + - cd inst/dev/datasets/svm/ && ./download_transduction.sh && cd ../../../.. 
+ +install: + - ./travis-tool.sh install_deps + # Install dependencies of R tests + - Rscript -e 'library("devtools"); library("methods"); options(repos=c(CRAN="http://cran.rstudio.com")); install_deps(dependencies = "Suggests")' + +script: ./travis-tool.sh run_tests + +after_failure: + - ./travis-tool.sh dump_logs + +notifications: + email: + on_success: change + on_failure: change diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 00000000..4a56a0d1 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,23 @@ +cmake_minimum_required(VERSION 3.0) +project(gmumr_project) + +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake") +find_package(R) + +set(INCLUDE_DIR ${PROJECT_SOURCE_DIR}/inst/include CACHE PATH "Path to include directory") +set(SRC_DIR ${PROJECT_SOURCE_DIR}/src CACHE PATH "Path to gmum.r library source files") +set(TESTS_DIR ${PROJECT_SOURCE_DIR}/tests/cpp CACHE PATH "Path to C++ tests directory") +set(TEST_DATA_PATH ${PROJECT_SOURCE_DIR}/inst/dev/datasets CACHE PATH "Path to test input data directory") +set(GMUM_INCLUDE ${INCLUDE_DIR} ${INCLUDE_DIR}/cec ${INCLUDE_DIR}/gng ${INCLUDE_DIR}/svm ${INCLUDE_DIR}/svmlight ${INCLUDE_DIR}/utils CACHE TYPE LIST) + +set(CMAKE_SKIP_BUILD_RPATH FALSE) +set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) +set(CMAKE_INSTALL_RPATH "${SRC_DIR}") +set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) + +add_definitions(-DNDEBUG -DNDEBUG_GMUM -DNDEBUG_GMUM_2 -DRCPP_INTERFACE -DARMA_64BIT_WORD) +include_directories(${GMUM_INCLUDE} ${R_INCLUDE_DIR}) +link_directories(${R_LIB_DIRS}) + +add_subdirectory(${SRC_DIR}) +add_subdirectory(${TESTS_DIR}) diff --git a/DESCRIPTION b/DESCRIPTION index d62986ec..10eb4413 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,13 +1,32 @@ Package: gmum.r +Version: 0.2.1 +Date: 2015-09-25 +Title: GMUM Machine Learning Group Package Type: Package -Title: Package with models proposed by GMUM group at Jagiellonian University -Version: 1.0 -Date: 2014-02-25 -Author: Stanislaw Jastrzebski -Maintainer: Stanislaw Jastrzebski -Description: More about what it does (maybe more than one line) -License: MIT -Depends: - Rcpp (>= 0.10.4) -LinkingTo: Rcpp +Author: Wojciech Czarnecki, Stanislaw Jastrzebski, Marcin Data, Igor Sieradzki, Mateusz Bruno-Kaminski, Karol Jurek, Piotr Kowenzowski, Michal Pletty, Konrad Talik, Maciej Zgliczynski +Maintainer: Stanislaw Jastrzebski +Description: Direct R interface to Support Vector Machine libraries ('LIBSVM' and 'SVMLight') and efficient C++ implementations of Growing Neural Gas and models developed by 'GMUM' group (Cross Entropy Clustering and 2eSVM). 
+License: MIT + file LICENSE +Repository: CRAN +Imports: + ggplot2 (>= 1.0.0), + stats, + igraph, + SparseM, + Matrix, + httr, + Rcpp (>= 0.11.6), + methods, + grid +LinkingTo: Rcpp, RcppArmadillo, BH NeedsCompilation: yes +LazyData: yes +Suggests: + car, + caret, + e1071, + klaR, + testthat, + mlbench +URL: https://github.com/gmum/gmum.r +BugReports: https://github.com/gmum/gmum.r/issues diff --git a/LICENSE b/LICENSE index 35aa5989..2a4a055d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,20 +1,2 @@ -The MIT License (MIT) - -Copyright (c) 2013 GyMnasiUM - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +YEAR: 2015 +COPYRIGHT HOLDER: GMUM diff --git a/NAMESPACE b/NAMESPACE old mode 100755 new mode 100644 index d5a6ebc7..f81ff63c --- a/NAMESPACE +++ b/NAMESPACE @@ -1,2 +1,110 @@ +# Generated by roxygen2 (4.1.1): do not edit by hand + +S3method(SVM,default) +S3method(SVM,formula) +S3method(clustering,Rcpp_CecModel) +S3method(clustering,Rcpp_GNGServer) +S3method(node,Rcpp_GNGServer) +S3method(pause,Rcpp_GNGServer) +S3method(plot,MultiClassSVM) +S3method(plot,Rcpp_CecModel) +S3method(plot,Rcpp_GNGServer) +S3method(plot,Rcpp_SVMClient) +S3method(predict,MultiClassSVM) +S3method(predict,Rcpp_CecModel) +S3method(predict,Rcpp_GNGServer) +S3method(predict,Rcpp_SVMClient) +S3method(print,MultiClassSVM) +S3method(print,Rcpp_CecModel) +S3method(print,Rcpp_GNGServer) +S3method(print,Rcpp_SVMClient) +S3method(run,Rcpp_GNGServer) +S3method(summary,MultiClassSVM) +S3method(summary,Rcpp_CecModel) +S3method(summary,Rcpp_GNGServer) +S3method(summary,Rcpp_SVMClient) +S3method(terminate,Rcpp_GNGServer) +export(CEC) +export(GNG) +export(OptimizedGNG) +export(SVM) +export(calculateCentroids) +export(caret.gmumSvmLinear) +export(caret.gmumSvmPoly) +export(caret.gmumSvmRadial) +export(centers) +export(clustering) +export(convertToIGraph) +export(covMatrix) +export(energy) +export(errorStatistics) +export(findClosests) +export(get.wine.dataset.X) +export(get.wine.dataset.y) +export(getDataset) +export(gng.plot.layout.v2d) +export(gng.preset.cube) +export(gng.preset.plane) +export(gng.preset.sphere) +export(gngLoad) +export(gngSave) +export(insertExamples) +export(isRunning) +export(iterations) +export(logClusters) +export(logEnergy) +export(meanError) +export(node) +export(numberNodes) +export(pause) +export(predictComponent) +export(run) +export(runAll) +export(runOneIteration) +export(svm.accuracy) +export(terminate) +exportClasses(MultiClassSVM) +exportClasses(Rcpp_CecConfiguration) +exportClasses(Rcpp_CecModel) +exportClasses(Rcpp_GNGServer) 
+exportClasses(Rcpp_SVMClient) +import(Rcpp) +import(igraph) +importFrom(Matrix,Matrix) +importFrom(ggplot2,aes) +importFrom(ggplot2,geom_abline) +importFrom(ggplot2,geom_point) +importFrom(ggplot2,geom_tile) +importFrom(ggplot2,ggplot) +importFrom(ggplot2,scale_alpha_identity) +importFrom(ggplot2,scale_colour_brewer) +importFrom(ggplot2,scale_fill_brewer) +importFrom(ggplot2,scale_size_continuous) +importFrom(ggplot2,scale_size_identity) +importFrom(grDevices,rainbow) +importFrom(graphics,hist) +importFrom(graphics,pairs) +importFrom(graphics,par) +importFrom(graphics,plot) +importFrom(graphics,plot.new) +importFrom(graphics,points) +importFrom(graphics,title) +importFrom(grid,grid.layout) +importFrom(grid,grid.newpage) +importFrom(grid,pushViewport) +importFrom(grid,viewport) +importFrom(httr,GET) +importFrom(httr,content) +importFrom(methods,as) +importFrom(methods,hasArg) +importFrom(methods,is) +importFrom(methods,new) +importFrom(stats,na.omit) +importFrom(stats,prcomp) +importFrom(stats,predict) +importFrom(stats,rnorm) +importFrom(stats,runif) +importFrom(stats,update) +importFrom(utils,data) +importFrom(utils,read.csv) useDynLib(gmum.r) -export(hello_gmum) diff --git a/R/R_scripts/3DDataGenerator.R b/R/R_scripts/3DDataGenerator.R new file mode 100644 index 00000000..968e0169 --- /dev/null +++ b/R/R_scripts/3DDataGenerator.R @@ -0,0 +1,27 @@ +gaussGenerator<-function(listSizeOfData,dimensionOfData){ + dataSeat<-matrix(,,dimensionOfData) + label<-c() + for(i in (1:length(listSizeOfData))){ + n<-listSizeOfData[i]; + rotaion<-matrix(rnorm(dimensionOfData*dimensionOfData,0,1), nrow = dimensionOfData) + dataTemp=matrix(rnorm(n*dimensionOfData,0,1),n,dimensionOfData); + m1=2*rnorm(1,0,1); + m2=m1; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp%*%rotaion+mean) + label<-c(label,rep(i,n)) + } + dataSeat<-tail(dataSeat,n=-1) + list(data=dataSeat,label=label) +} + +listSizeOfData<-c(700,300,1000) +dimensionOfData<-3 + +test<-gaussGenerator(listSizeOfData,dimensionOfData) + +plot(test$data,pch=20,col=test$label) + +#write.table(test$data,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data.txt",row.names=FALSE,col.names=FALSE); +#write.table(test$label,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data_cluster.txt",row.names=FALSE,col.names=FALSE,sep = "\n"); + diff --git a/R/R_scripts/bigDataGenerator.R b/R/R_scripts/bigDataGenerator.R new file mode 100644 index 00000000..968e0169 --- /dev/null +++ b/R/R_scripts/bigDataGenerator.R @@ -0,0 +1,27 @@ +gaussGenerator<-function(listSizeOfData,dimensionOfData){ + dataSeat<-matrix(,,dimensionOfData) + label<-c() + for(i in (1:length(listSizeOfData))){ + n<-listSizeOfData[i]; + rotaion<-matrix(rnorm(dimensionOfData*dimensionOfData,0,1), nrow = dimensionOfData) + dataTemp=matrix(rnorm(n*dimensionOfData,0,1),n,dimensionOfData); + m1=2*rnorm(1,0,1); + m2=m1; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp%*%rotaion+mean) + label<-c(label,rep(i,n)) + } + dataSeat<-tail(dataSeat,n=-1) + list(data=dataSeat,label=label) +} + +listSizeOfData<-c(700,300,1000) +dimensionOfData<-3 + +test<-gaussGenerator(listSizeOfData,dimensionOfData) + +plot(test$data,pch=20,col=test$label) + +#write.table(test$data,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data.txt",row.names=FALSE,col.names=FALSE); 
+#write.table(test$label,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data_cluster.txt",row.names=FALSE,col.names=FALSE,sep = "\n"); + diff --git a/R/R_scripts/comparative.tests.2e.R b/R/R_scripts/comparative.tests.2e.R new file mode 100644 index 00000000..fb2b9518 --- /dev/null +++ b/R/R_scripts/comparative.tests.2e.R @@ -0,0 +1,93 @@ +#I HATE R + +# General results: SVMLight lack of control over convergence time, by default much longer convergence in RBF case + +# Tested: cv time, final model time, accuracy, #sv, cv scores similarity + +library(e1071) +library(doMC) +registerDoMC() # parallel foreach +library(caret) +library(SparseM) +library(mlbench) +library(knitr) +#source("caret.svm.models.R") + + +# Download libsvm datasets to data_local folder: +# australian diabetes german.numer sonar_scale +# breast-cancer fourclass heart splice +# Read binary datasets +datasets = lapply(list.files(path="data_local"), function(x){ ds=read.matrix.csr(paste("data_local",x,sep="/")); ds$name=x; ds}) + +get.results <- function(name, data, class.column.name, params, model, seed=777, ...){ + + class.column.index <- which(colnames(data)==class.column.name) + p = 0.66 + inTraining <- 1:round(nrow(data) * p) + training <- data[ inTraining,] + testing <- data[-inTraining,] + + params <- params$C + models <- sapply(params, function(x) SVM(formula(paste(class.column.name,"~ .", sep=" ")), prep="2e", training, kernel="linear", C=x, ...)) + + l <-c() + row_length <- 0 + for(model in models){ + class.column.index < which(colnames(data)==class.column.name) + pred_test <- predict(model, subset(testing, select=-c(class.column.index))) + pred_train <- predict(model, subset(training, select=-c(class.column.index))) + cf_test <- confusionMatrix(pred_test, testing[, class.column.index]) + cf_train <- confusionMatrix(pred_train, training[, class.column.index]) + + score_row = c(name, as.numeric(cf_train$overall[1]), as.numeric(cf_test$overall[1]), model$SV, model$C, model$iterations) + row_length <- length(score_row) + l <- c(l, score_row) + } + result <- data.frame(matrix(l, nrow=length(params), ncol=row_length, byrow=TRUE)) + colnames(result) <- c("ExperimentName", "trAcc", "tstAcc", "SVs", "C", "iterations") + result +} + +# Coarse CV grid +get.all.results <- function(ds, fit.svmlight=TRUE, fit.small.grid=TRUE){ + subset <- c(1) + + C <- 10^(-6:7) + + if(fit.small.grid){ + C <- 10^(-1:1) + } + + model.names=list("gmum.r::svm.linear") + model.calls=list(caret.gmumSvmLinear) + model.tuneGrids = list(expand.grid(C=C)) + + model.names=model.names[subset] + model.calls=model.calls[subset] + model.tuneGrids = model.tuneGrids[subset] + + df <- as.data.frame(as.matrix(ds$x)) + df$y = ds$y + df$name + + res <- matrix(, nrow = 0, ncol = 6) + for(model.index in 1:length(model.names)){ + name <- model.names[[model.index]] + if(grepl("^gmum", name)){ + name.experiment <- paste("libsvm_",ds$name, sep="") + res <- rbind(res, get.results(name.experiment, df, "y", model.tuneGrids[[model.index]], model.calls[[model.index]], verbosity=0, lib='libsvm')) + if(fit.svmlight){ + name.experiment <- paste("svmlight_",ds$name, sep="") + res <- rbind(res, get.results(name.experiment, df, "y", model.tuneGrids[[model.index]], model.calls[[model.index]], verbosity=0, lib='svmlight')) + } + } + } + + print(kable(res, format = "markdown")) + +} + +for(ds in datasets){ + get.all.results(ds, fit.svmlight = TRUE, fit.small.grid = TRUE) +} diff --git a/R/R_scripts/comparative.tests.R b/R/R_scripts/comparative.tests.R new file mode 
100644 index 00000000..488e22b8 --- /dev/null +++ b/R/R_scripts/comparative.tests.R @@ -0,0 +1,145 @@ +#I HATE R + +# General results: SVMLight lack of control over convergence time, by default much longer convergence in RBF case + +# Tested: cv time, final model time, accuracy, #sv, cv scores similarity + +library(e1071) +library(doMC) +registerDoMC() # parallel foreach +library(caret) +library(SparseM) +library(mlbench) +library(knitr) +source("caret.svm.models.R") + + +# Download libsvm datasets to data_local folder: +# australian diabetes german.numer sonar_scale +# breast-cancer fourclass heart splice +# Read binary datasets +datasets = lapply(list.files(path="data_local"), function(x){ ds=read.matrix.csr(paste("data_local",x,sep="/")); ds$name=x; ds}) + +get.results <- function(name, data, class.column.name, params, model, seed=777, ...){ + set.seed(seed) + + class.column.index <- which(colnames(data)==class.column.name) + inTraining <- createDataPartition(data[, class.column.index], p = .75, list = FALSE) + training <- data[ inTraining,] + testing <- data[-inTraining,] + + + + fitControl <- trainControl(method = "cv", + ## 10-fold CV... + number = 7, + ## repeated 2 times + repeats = 1, + verboseIter=FALSE + ) + + + + model <- train(formula(paste(class.column.name,"~ .", sep=" ")), data = training, + method = model, + preProc = c("center", "scale"), + tuneLength = 8, + tuneGrid = params, + trControl = fitControl, + ...) + + + class.column.index < which(colnames(data)==class.column.name) + pred <- predict(model$finalModel, predict(model$preProcess, subset(testing, select=-c(class.column.index)))) + cf <- confusionMatrix(pred, testing[, class.column.index]) + + list(name=name, + trainAcc=max(model$results$Accuracy), + trainAccStd=model$results$AccuracySD[which.max(model$results$Accuracy)], + testAcc=as.numeric(cf$overall[1]), + trainTime=as.numeric(model$times$everything[1]), + finalModelTime=as.numeric(model$times$final[1])) +} + +# Coarse CV grid +get.all.results <- function(ds, fit.svmlight=TRUE, fit.small.grid=TRUE){ + subset <- c(1,2,4,5) + + C <- 10^(-6:7) + C_poly <- 10^(-4:3) + Gamma <- 10^(-8:8) + degree <- c(2,3) + + if(fit.small.grid){ + C <- 10^(-5:4) + C_poly <- 10^(-4:3) + Gamma <- 10^(-6:7) + degree <- c(2,3) + } + + model.names=list("gmum.r::svm.radial", "gmum.r::svm.linear", "gmum.r::svm.poly", + "kernLab::svm.radial", "kernLab::svm.linear", "kernLab::svm.poly") + model.calls=list(caret.gmumSvmRadial, caret.gmumSvmLinear, caret.gmumSvmPoly, + "svmRadial", "svmLinear", "svmPoly") + model.tuneGrids = list(expand.grid(C=C, gamma=Gamma), expand.grid(C=C), + expand.grid(C=C_poly, gamma=Gamma, degree=degree, coef0=c(0)), + expand.grid(C=C, sigma=Gamma), expand.grid(C=C), + expand.grid(C=C_poly, scale=Gamma, degree=degree)) + + + + model.names=model.names[subset] + model.calls=model.calls[subset] + model.tuneGrids = model.tuneGrids[subset] + + df <- as.data.frame(as.matrix(ds$x)) + df$y = ds$y + df$name + + model.results <- list() + for(model.index in 1:length(model.names)){ + name <- model.names[[model.index]] + if(grepl("^gmum", name)){ + print("Fitting") + name.experiment <- paste("libsvm_",name,ds$name, sep="") + print(name.experiment) + model.results[[name.experiment]] = + get.results(name.experiment, df, "y", model.tuneGrids[[model.index]], model.calls[[model.index]], verbosity=0, lib='libsvm') + if(fit.svmlight){ + print("Fitting") + name.experiment <- paste("svmlight_",name,ds$name, sep="") + print(name.experiment) + model.results[[name.experiment]] = + 
get.results(name.experiment, df, "y", model.tuneGrids[[model.index]], model.calls[[model.index]], verbosity=0, lib='svmlight') + } + }else{ + print("Fitting") + print(name) + print(ds$name) + model.results[[name]] = + get.results(name, df, "y", model.tuneGrids[[model.index]], model.calls[[model.index]]) + } + save(list="model.results", file=paste("comparative.tests.", ds$name, model.index, ".RData", sep="")) + } + + M <- data.frame(trainAcc=sapply(model.results, function(x) x$trainAcc), + trainAccStd=sapply(model.results, function(x) x$trainAccStd), + testAcc=sapply(model.results, function(x) x$testAcc), + trainTime=sapply(model.results, function(x) x$trainTime), + finalModelTrainTime=sapply(model.results, function(x) x$finalModelTime) + ) + row.names(M) <- sapply(model.results, function(x) x$name) + + save(list="M", file=paste("comparative.tests.", ds$name, ".RData", sep="")) + + M +} + +for(ds in datasets){ + M <- get.all.results(ds, fit.svmlight = TRUE, fit.small.grid = TRUE) +} + + + + + diff --git a/R/R_scripts/energy.R b/R/R_scripts/energy.R new file mode 100644 index 00000000..ec187d87 --- /dev/null +++ b/R/R_scripts/energy.R @@ -0,0 +1,84 @@ +standard_entropy <- function(cluster_points) +{ + dimension <- dim(cluster_points)[2] + cluster_cov_mat <- cov_mat(cluster_points) + det_cluster_cov_mat <- det(cluster_cov_mat) + if(det_cluster_cov_mat == 0) + { + det_cluster_cov_mat <- 1.0e-32 + } + return( dimension/2 * log(2 * pi * exp(1)) + log(det_cluster_cov_mat) / 2 ) +} + +sphere_entropy <- function(cluster_points) +{ + dimension <- dim(cluster_points)[2] + cluster_cov_mat_trace <- cov_mat_trace(cluster_points) + if(cluster_cov_mat_trace == 0) + { + cluster_cov_mat_trace <- 1.0e-32 + } + return ( dimension/2 * log(2 * pi * exp(1) / dimension) + dimension / 2 * log(cluster_cov_mat_trace) ) +} + +diagonal_entropy <- function(cluster_points) +{ + dimension <- dim(cluster_points)[2] + cluster_cov_mat <- cov_mat(cluster_points) + det_cluster_cov_mat <- prod(diag(cluster_cov_mat)) + if(det_cluster_cov_mat == 0) + { + det_cluster_cov_mat <- 1.0e-32 + } + return ( dimension/2 * log(2 * pi * exp(1)) + log(det_cluster_cov_mat) / 2 ) +} + +cluster_energy <- function(cluster_entropy, cluster_npoints, npoints) +{ + p <- cluster_npoints / npoints + return( p * (cluster_entropy - log(p)) ) +} + +cov_mat <- function(cluster_points) +{ + npoints <- dim(cluster_points)[1] + dimension <- dim(cluster_points)[2] + mean <- as.vector(colMeans(cluster_points)) + result <- matrix(nrow = dimension, ncol = dimension, data = 0) + for(i in 1:npoints) + { + p <- as.matrix(cluster_points[i, ] - mean) + result <- result + (p %*% t(p)) / npoints + } + return(result) +} + +cov_mat_trace <- function(cluster_points) +{ + npoints <- dim(cluster_points)[1] + mean <- as.vector(colMeans(cluster_points)) + result <- 0.0 + for(i in 1:npoints) + { + p <- cluster_points[i, ] - mean + result <- result + (p %*% p) + } + result <- result / npoints + return(result) +} + +cec_energy <- function(dataset, clustering, entropy_func) +{ + dimension <- ncol(dataset) + npoints <- dim(dataset)[1] + energy <- 0 + for (i in unique(clustering)) + { + cluster_points <- dataset[clustering == i,] + cluster_npoints <- dim(cluster_points)[1] + curr_cluster_entropy <- entropy_func(cluster_points) + curr_cluster_energy <- cluster_energy(curr_cluster_entropy, cluster_npoints, npoints) + energy <- energy + curr_cluster_energy + } + return(as.numeric(energy)) +} diff --git a/R/R_scripts/energy.R.bak b/R/R_scripts/energy.R.bak new file mode 100644 index 
00000000..067afbcd --- /dev/null +++ b/R/R_scripts/energy.R.bak @@ -0,0 +1,77 @@ + +tr <- function(mat){ + + return(sum(diag(mat))) +} +energyOneCluster <- function( kind, probab, covariances,dimOfData){ # Move this to separate function parameter passed + energy <- switch(kind, + { # 1 + + energy = probab*( -log(probab)+0.5*log(det(covariances) ) +dimOfData*0.5*log(2*pi*exp(1))) + }, + { # 2 + N = dimOfData + print("N") + print(N) + print("tr(covariances)") + print(tr(covariances)) + print("log(tr(covariances) )") + print(log(tr(covariances) )) + p = probab + print("p") + print(p) + energy = probab*( -log(probab)+0.5*dimOfData*log(tr(covariances) ) +dimOfData*0.5*log(2*pi*exp(1)/dimOfData)) + }) + print("energy") + print(energy) + return(energy) +} + +CECEnergy<-function(dataSet,label,kind){ + k<-length(unique(label)) + dim<-ncol(dataSet) + means <- matrix(0,k,dim) + probab<- matrix(0,k,1) + energyArray<- matrix(0,k,1) + covariances <- array(c(1:k*dim*dim),c(k,dim,dim)) + energy<-0 + for (i in 1:k){ + means[i,] <- apply(dataSet[label == i,],2,mean) + covariances[i,,] <- ((length(dataSet[,1])-1)/length(dataSet[,1]))*cov(dataSet[label == i,]) + probab[i] <- length(dataSet[label == i,1])/length(dataSet[,1]) + energyArray[i] <- energyOneCluster(kind=2, probab=probab[i], covariances=covariances[i,,],dim) + energy <- energy + energyOneCluster(kind=2, probab=probab[i], covariances=covariances[i,,],dim) + } + list(means=means,covariances=covariances,energyArray=energyArray,energy=energy) +} + +dataPath = file.path("..","..","inst","data_sets","cec") +simplePath = file.path(dataPath,"simple_1") +mousePath = file.path(dataPath,"mouse_1") +mousePathS = file.path(dataPath,"mouse_1_spherical") +ellipsePath = file.path(dataPath,"EllipseGauss") +rpath = mousePathS +dataSetPath = file.path(rpath,"input.txt") +labelPathPrzemek = file.path(rpath,"cluster.txt") +labelPathMy = file.path(rpath,"our_clusters.txt") + +dataSet <- matrix(as.numeric(as.matrix(read.table(dataSetPath),skipNul=TRUE)),ncol=2); + +labelPrzemek <- as.matrix(read.table(labelPathPrzemek)); +print(min(labelPrzemek)) +labelPrzemek<- labelPrzemek + (1 - min(labelPrzemek)) +#labelMy <- as.matrix(read.table(labelPathMy)); +#print (min(labelMy)) +#labelMy<- labelMy + (1 - min(labelMy)) + + +print(CECEnergy(dataSet,labelPrzemek,1)) +#print(CECEnergy(dataSet,labelMy,1)) + + +#file.path("gmum.r\\test\\data\\simple_1\\input.txt", fsep = .Platform$file.sep) +#plot(dataSet,pch=20,col=label) + + + + diff --git a/R/R_scripts/gaussGenerator.R b/R/R_scripts/gaussGenerator.R new file mode 100644 index 00000000..84ed57db --- /dev/null +++ b/R/R_scripts/gaussGenerator.R @@ -0,0 +1,28 @@ +gaussGenerator<-function(listSizeOfData,dimensionOfData){ + dataSeat<-matrix(,,dimensionOfData) + label<-c() + for(i in (1:length(listSizeOfData))){ + n<-listSizeOfData[i]; + rotaion<-matrix(rnorm(dimensionOfData*dimensionOfData,0,1), nrow = dimensionOfData) + dataTemp=matrix(rnorm(n*dimensionOfData,0,1),n,dimensionOfData); + m1=2*rnorm(1,0,1); + m2=m1; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp%*%rotaion+mean) + label<-c(label,rep(i,n)) + } + dataSeat<-tail(dataSeat,n=-1) + list(data=dataSeat,label=label) +} + +listSizeOfData<-c(700,300,1000) +dimensionOfData<-2 + +test<-gaussGenerator(listSizeOfData,dimensionOfData) + +plot(test$data,pch=20,col=test$label) + +#write.table(test$data,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data.txt",row.names=FALSE,col.names=FALSE); 
+#write.table(test$label,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data_cluster.txt",row.names=FALSE,col.names=FALSE,sep = "\n"); + + diff --git a/R/R_scripts/mouseGaussGenerator.R b/R/R_scripts/mouseGaussGenerator.R new file mode 100644 index 00000000..d754f149 --- /dev/null +++ b/R/R_scripts/mouseGaussGenerator.R @@ -0,0 +1,43 @@ +mouseGaussGenerator<-function(listSizeOfData,EarDistance){ + dimensionOfData<-2 + dataSeat<-matrix(,,dimensionOfData) + label<-c() + #glowa + n<-listSizeOfData[1] + r<-2; + dataTemp=matrix(rnorm(n*dimensionOfData,0,r),n,dimensionOfData); + m1=0; + m2=0; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp+mean) + label<-c(label,rep(1,n)) + #prawe ucho + r<-1; + n<-listSizeOfData[2] + dataTemp=matrix(rnorm(n*dimensionOfData,0,r),n,dimensionOfData); + m1=-1-2-EarDistance; + m2=1+2+EarDistance; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp+mean) + label<-c(label,rep(2,n)) + #lewe ucho + r<-1; + n<-listSizeOfData[3] + dataTemp=matrix(rnorm(n*dimensionOfData,0,r),n,dimensionOfData); + m1=1+2+EarDistance; + m2=1+2+EarDistance; + mean=matrix(c(rep(m1,n),rep(m2,n)),n,dimensionOfData); + dataSeat<-rbind(dataSeat,dataTemp+mean) + label<-c(label,rep(3,n)) + + dataSeat<-tail(dataSeat,n=-1) + list(data=dataSeat,label=label) +} +#listSizeOfData<-c(1000,500,500) +#EarDistance<-2 +#test<-mouseGaussGenerator(listSizeOfData,EarDistance) +#plot(test$data,pch=20,col=test$label) + +#write.table(test$data,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data.txt",row.names=FALSE,col.names=FALSE); +#write.table(test$label,file="C:\\Users\\admin\\Dropbox\\CEC_plugin_R\\TESTY\\mouse_1\\data_cluster.txt",row.names=FALSE,col.names=FALSE,sep = "\n"); + diff --git a/R/R_scripts/mouseUniformGenerator.R b/R/R_scripts/mouseUniformGenerator.R new file mode 100644 index 00000000..c7c2a916 --- /dev/null +++ b/R/R_scripts/mouseUniformGenerator.R @@ -0,0 +1,65 @@ +mouseGaussGenerator<-function(listSizeOfData,EarDistance){ + dataSeat<-matrix(,,dimensionOfData) + label<-c() + dimensionOfData<-2 + #glowa + n<-listSizeOfData[1] + r<-2; + X <- runif(4*n,min=-r,max=r) # generowanie z rozkładu jednostajnego na [-1,1] + Y <- runif(4*n,min=-r,max=r) + Accept <- X^2+Y^2 ((npoints - 1) / npoints)) ) + stop("control.eps = ", control.eps, " should be in range [0, (N-1)/N]!"); + + if (control.itmax < 0) + stop("Maximum number of iterations should be a natural number!"); + + if(is(params.cov, "data.frame")){ + params.cov = data.matrix(params.cov); + } + + config <- new(CecConfiguration) + + if(is.null(seed) == FALSE) { + config$.setSeed(seed) + } + config$.setDataSet(x) + config$.setEps(control.eps) + config$.setNrOfClusters(k) + + if(is.null(params.mix) == FALSE) { + config$.setMix(params.mix) + } + + if(is.null(params.function) == FALSE) { + config$.setFunction(params.function) + } + + config$.setLogEnergy(log.energy) + config$.setLogCluster(log.ncluster) + config$.setNstart(control.nstart) + config$.setCentroids(params.centroids) + config$.setMethodType(method.type) + config$.setCov(params.cov) + config$.setR(params.r) + config$.setMethodInit(method.init) + config$.setItmax(control.itmax) + config$.setAlgorithm('hartigan') + + model <- new(CecModel, config) + + + + assign("call", call, model) + assign("energy", model$.getEnergy(), model) + assign("clustering", model$.getClustering(), model) + assign("centers", model$.getCenters(), model) + assign("covMatrix", model$.getCovMatrix(), model) 
+ assign("logEnergy", model$.getLogEnergy(), model) + assign("logNumberOfClusters", model$.getLogNumberOfClusters(), model) + + assign("iterations", model$.getIterations(), model) + + assign(".staticFields", c("call", "energy", "clustering", "centers", "covMatrix", + "logNumberOfClusters", "logEnergy", "iterations"), model) + + model +} + +runAll <- function(c) { + c$.runAll() +} + +runOneIteration <- function(c) { + c$.runOneIteration() +} + +energy <- function(c) { + c$.getEnergy() +} + +clustering.Rcpp_CecModel <- function(c) { + c$.getClustering() +} + +getDataset <- function(c) { + c$.getDataset() +} + +centers <- function(c) { + c$.getCenters() +} + +covMatrix <- function(c) { + c$.getCovMatrix() +} + +logEnergy <- function(c) { + c$.getLogEnergy() +} + +logNumberOfClusters <- function(c) { + c$.getLogNumberOfClusters() +} + + +iterations <- function(c) { + c$.getIterations() +} + +predict.Rcpp_CecModel <- function(object, x, ...) { + if ( !is(x, "data.frame") && !is(x, "matrix") && !is(x,"numeric") ) { + stop("Wrong target class, please provide data.frame, matrix or numeric vector") + } + + if(is(x, "vector")){ + x = matrix(x, nrow=1, byrow=TRUE) + } + else if (!is(x, "matrix")) { + x = data.matrix(x) + } + + if(dim(object$.getDataset())[2] != dim(x)[2]){ + stop("Incompatible dimension!") + } + + apply(x, 1, function(row) { + object$.predict(row) + }) +} + +#' Class Rcpp_CecModel. +#' +#' Class \code{Rcpp_CecModel} defines a CEC model class. +#' +#' @name Rcpp_CecModel-class +#' @exportClass Rcpp_CecModel +setClass(Class = "Rcpp_CecModel") + +#' Class Rcpp_CecConfiguration. +#' +#' Class \code{Rcpp_CecConfiguration} defines a CEC model configuration class. +#' +#' @name Rcpp_CecConfiguration-class +#' @exportClass Rcpp_CecConfiguration +setClass(Class = "Rcpp_CecConfiguration") diff --git a/R/cec.plot.R b/R/cec.plot.R new file mode 100644 index 00000000..372ca936 --- /dev/null +++ b/R/cec.plot.R @@ -0,0 +1,100 @@ +#' Plot CEC +#' @export +#' @method plot Rcpp_CecModel +#' +#' @title plot +#' +#' @rdname plot.cec +#' +#' @description Plot clustering found on 2D plot coloring by cluster. +#' +#' +#' @param x CEC model object. +#' @param slice List of dimentions chosen for display since plot is 2D. +#' @param ellipses Outline clusters. +#' @param centers Marks center of every cluster. +#' @param ... other arguments not used by this method. +#' +#' @param pca Apply PCA or not +#' +#' @examples +#' \dontrun{ +#' plot(cec) +#' plot(cec, slice=c(1,3), ellipses=TRUE) +#' plot(cec, slice=c(1,2,3)) +#' plot(cec, ellipses=TRUE, centers=FALSE) +#' plot(cec, pca=TRUE, ellipses=TRUE, centers=FALSE) +#' } +plot.Rcpp_CecModel <- function(x, slice = c(), pca=FALSE, ellipses = FALSE, centers = FALSE, ...) 
{ + + d <- x$.getDataset() + if(pca){ + if(ncol(d) <= 2){ + stop("CEC dataset should have dimension > 2 to use PCA") + } + mx <- colMeans(d) + pca_data <- prcomp(d, scale=FALSE) + v <- pca_data$rotation + v <- v[, 1:2] + d <- pca_data$x + } + if (length(slice) == 0) { + if(pca){ + slice <- c(1,2) + } else { + slice <- c(1:(dim(d)[2])) + } + plot(d[,slice], col = (x$clustering + 1), pch=20) + } + else if (length(slice) == 1 || length(slice) == 2) { + plot(d[,slice], col = (x$clustering + 1), pch=20) + } + else{ + pairs(d[,slice], col = (x$clustering + 1)) + } + + if (ellipses || centers) { + cen <- x$centers + n <- length(cen) + if(pca){ + for (i in 1:n) { + # t(t(cen[[i]])) creates vector, t(v) is rotation matrix to lower dim subspace + # t(everything) makes it again a row + cen[[i]] <- t(t(v) %*% t(t(cen[[i]]))) + } + } + if (ellipses && length(slice) <= 2){ + #library("car") + cov <- x$covMatrix + for (i in 1:n) { + data <- unlist(cov[i]) + covMat <- matrix(data,ncol=sqrt(length(data))) + if(pca){ + covMat <- t(v) %*% covMat %*% v + } else { + covMat <- covMat[slice,slice] + } + m <-unlist(cen[i][slice]) + eigenValuesAndVectors <- eigen(covMat) + veE <- eigenValuesAndVectors$vectors + l <- eigenValuesAndVectors$values + r <- seq(-pi, pi, by = 0.001) + len <- length(r) + Xa <- 2*sqrt(l[1])*cos(r) + Ya <- 2*sqrt(l[2])*sin(r) + mm <- c(rep(m[1], len),rep(m[2],len)) + meansMultiply <- matrix(mm, ncol = 2) + line1 <- cbind(Xa,Ya) + lineAll <- rbind(line1) + ddd <- (lineAll%*%t(veE)) + meansMultiply + points(ddd,col = "black", type = "l", lwd = 2) + #dataEllipse(d[x$clustering() == (i-1),], plot.points=FALSE, add = TRUE, levels = c(0.9)) + } + } + + if(centers) { + mcenters <- do.call(rbind,cen) + points(mcenters[,slice], col="blue", bg=par("bg")) + } + } +} diff --git a/R/cec.print.R b/R/cec.print.R new file mode 100644 index 00000000..a1b40e99 --- /dev/null +++ b/R/cec.print.R @@ -0,0 +1,29 @@ +#' Print CEC +#' @export +#' @rdname print.cec +#' @method print Rcpp_CecModel +#' +#' @title print +#' +#' @description Print basic information about clusters found. +#' Presents a structure of the cec results object (clusters found) +#' +#' @docType methods +#' +#' @param x CEC object model. +#' @param ... other arguments not used by this method. +#' +print.Rcpp_CecModel <- function(x, ...) { + print(sprintf("CEC clustering; %d clusters with energy = %f", + length(x$centers), x$energy)) + print("Centers: ") + print(x$centers) + print("Covariances: ") + print(x$covMatrix) +} + +show.Rcpp_CecModel <- function(object){ + print(object) +} + +setMethod("show", "Rcpp_CecModel", show.Rcpp_CecModel) diff --git a/R/cec.summary.R b/R/cec.summary.R new file mode 100644 index 00000000..8ed03187 --- /dev/null +++ b/R/cec.summary.R @@ -0,0 +1,38 @@ +#' Summary CEC +#' @export +#' @rdname summary.cec +#' @method summary Rcpp_CecModel +#' +#' @title summary +#' +#' @description Print detailed information about CEC model object +#' +#' @docType methods +#' +#' @param object CEC model object. +#' @param ... other arguments not used by this method. +#' +summary.Rcpp_CecModel <- function(object, ...) 
{ + print(object) + + if(isParameterOn(object$iterations)){ + print("Iterations: ") + print(object$iterations) + } + if(isParameterOn(object$logEnergy)){ + print("Energy for every iteration: ") + print(object$logEnergy) + } + if(isParameterOn(object$logNumberOfClusters)){ + print("Number of clusters for every iteration: ") + print(object$logNumberOfClusters) + } +} + +show.Rcpp_CecModel <- function(object) { + summary(object) +} + +isParameterOn <- function(x) { + return(length(x) != 0) +} diff --git a/R/gmum.R b/R/gmum.R new file mode 100644 index 00000000..aea092ed --- /dev/null +++ b/R/gmum.R @@ -0,0 +1,36 @@ +# Silences R CMD check warnings +utils::globalVariables(c(".rs.getAnywhere.original", ".rs.getAnywhere")) +utils::globalVariables(c("X1", "X2", "label")) + +#' @importFrom grDevices rainbow +#' @importFrom graphics hist pairs par plot plot.new points title +#' @importFrom stats na.omit prcomp predict rnorm runif update +#' @importFrom utils data read.csv +#' @importFrom Matrix Matrix +#' @importFrom methods as hasArg is new +#' @importFrom grid viewport pushViewport grid.newpage grid.layout +NULL + +# Lazy loading to allow for discovery of all files +evalqOnLoad( { + # Autocompletion override + autocompl <- function(x, pattern="") { + targets <- c(asNamespace("Rcpp")$complete(x), x[['.staticFields']]) + grep(pattern, targets, value = TRUE)[! (substr(grep(pattern, targets, value = TRUE),1,1)==".")] + } + + `.DollarNames.Rcpp_C++Object` <<- autocompl + .DollarNames.Rcpp_SVMClient <<- autocompl + .DollarNames.Rcpp_GNGServer <<- autocompl + .DollarNames.Rcpp_CecModel <<- autocompl + + if(!exists(".rs.getAnywhere")) { + .rs.getAnywhere <- NULL # Silences R CMD check warning + } + + # Workaround RStudio bug + if(exists(".rs.getAnywhere") && !exists(".rs.getAnywhere.original")) { + .rs.getAnywhere.original <<- .rs.getAnywhere + .rs.getAnywhere <<- function(a, envir=.GlobalEnv){ .rs.getAnywhere.original(a, .GlobalEnv) } + } +}) diff --git a/R/gmum.errors.R b/R/gmum.errors.R new file mode 100644 index 00000000..98ebc62c --- /dev/null +++ b/R/gmum.errors.R @@ -0,0 +1,12 @@ +# Error codes + +gmum.error <- function(code, message){ + return(paste(code, ": ", message, sep="")) +} + +GMUM_ERROR = "Error" +GMUM_WRONG_LIBRARY = "Error 20" +GMUM_WRONG_KERNEL = "Error 21" +GMUM_BAD_PREPROCESS = "Error 22" +GMUM_NOT_SUPPORTED = "Error 23" +GMUM_WRONG_PARAMS = "Error 24" diff --git a/R/gng.R b/R/gng.R new file mode 100644 index 00000000..2aa9f020 --- /dev/null +++ b/R/gng.R @@ -0,0 +1,984 @@ +library(methods) + +#' @import igraph +#' @importFrom ggplot2 scale_size_continuous scale_size_identity geom_point aes ggplot geom_tile scale_fill_brewer scale_alpha_identity scale_colour_brewer geom_abline +NULL + +#' Use first two spatial coordinates as position in layout +#' +#' @note You can pass any igraph layout algorithm to plot +#' +#' @param g GNG object +#' +#' @export +gng.plot.layout.v2d <- function(g){ + cbind(V(g)$v0, V(g)$v1) +} + +gng.plot.color.label <- 'label' + +gng.plot.color.fast.cluster <- 'fast.cluster' + +gng.plot.color.cluster <- 'cluster' + +gng.plot.color.none <- 'none' + +gng.plot.layout.igraph.fruchterman.fast <- layout.fruchterman.reingold + +gng.plot.layout.igraph.auto <- layout.auto + +gng.plot.2d <- "2d" + +gng.plot.2d.errors <- "2d.errors" + +gng.type.default <- function(){ + c(2) +} + +gng.type.optimized <- function(minimum=0, maximum=10){ + c(0, minimum, maximum) +} + +gng.type.utility<- function(k=1.3){ + c(1, k) +} + +.gng.dataset.bagging.prob <- 3 +.gng.dataset.bagging <- 2 
+.gng.dataset.sequential <-1 + +.GNG <- NULL + +#' Plot GNG +#' +#' @title plot GNG object +#' @description Plot resulting graph using igraph plotting +#' @rdname plot.gng +#' @export +#' @method plot Rcpp_GNGServer +#' +#' @param x GNG object +#' @param mode \code{"2d"} (igraph plot) +#' \code{"2d.errors"} (igraph plot with mean error log plot) +#' +#' @param layout igraph layout to be used when plotting. Defaults to \code{layout.fruchterman.reingold}. +#' Other good choice is using \code{gng.plot.layout.v2d}, which returns two first spatial coordinates. +#' +#' @param vertex.color How to color vertexes. Possible values: \code{"fast.cluster"} (vertex color is set to fastgreedy.community clustering), +#' \code{"label"} (rounds to integer label if present), \code{list of integers} (colors vertices according to provided list), \code{"none"} (every node is white), +#' +#' @param vertex.size Size of plotted vertices +#' @param ... other arguments not used by this method. +#' +#' @note If you want to "power-use" plotting and plot for instance a subgraph, you might be interested in +#' exporting igraph with convertToIGraph function +#' +#' @examples +#' \dontrun{ +#' gng <- GNG(scaled.wine) +#' # Plots igraph using first 2 coordinates and colors according to clusters +#' plot(gng, mode=gng.plot.2d.errors, layout=gng.plot.layout.v2d, vertex.color=gng.plot.color.cluster) +#' +#' # For more possibilities see gng.plot.* constants +#' } +plot.Rcpp_GNGServer <- NULL + +#' Save model to binary format +#' @title gngSave +#' @description Writes model to a disk space efficient binary format. +#' @export +#' +#' @param object GNG object +#' @param filename File where binary will be saved +gngSave <- NULL + + +#' Load model from binary format +#' +#' @title gngLoad +#' @description Writes model to a disk space efficient binary format. +#' @export +#' +#' @param filename Binary file location +gngLoad <- NULL + +#' Get centroids +#' +#' @title calculateCentroids +#' @description Using passed community.detection finds communities and for each community pick node with biggest betweenness score +#' @export +#' +#' @param object GNG object +#' @param community.detection.algorithm Used algorithm from igraph package, by default spinglass.community +#' +#' @examples +#' \dontrun{ +#' gng <- GNG(gng.preset.sphere(100)) +#' print(node(gng, calculateCentroids(gng)[1])$pos) +#' } +calculateCentroids <- NULL + +#' Find closest node +#' +#' @title findClosests +#' @description Finds closest node from given list to vector. Often used together with calculateCentroids +#' @export + +#' @param object GNG object +#' @param node.ids List of indexes of nodes in gng. +#' @param x Can be either \code{vector} or \code{data.frame.} +#' +#' @examples +#' \dontrun{ +#' gng <- GNG(gng.preset.sphere(100)) +#' # Find closest centroid to c(1,1,1) +#' found.centroids <- calculateCentroids(gng) +#' findClosests(gng, found.centroids, c(1,1,1)) +#' } +findClosests <- NULL + +#' Check if GNG is running +#' +#' @title isRunning +#' @description Returns TRUE if GNG object is training +#' @export +#' @param object GNG object +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' # FALSE, because did not pass train.online to constructor +#' print(isRunning(gng)) +#' +isRunning <- function(object) { + return(object$.isRunning()) +} + +#' Find closest component +#' @name predictComponent +#' @title predictComponent +#' @description Finds connected component closest to given vector(s). 
On the first +#' execution of function strongly connected components are calculated using igraph::cluster function. +#' @export +#' @rdname predictComponent-methods +#' @docType methods +#' +#' @param object GNG object +#' @param x Can be either \code{vector} or \code{data.frame}. +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' # Find closest component to c(1,1,1) +#' predictComponent(gng, c(1,1,1)) +#' +#' @aliases predictComponent +predictComponent <- NULL + +#' Get GNG node +#' @name node +#' @title node +#' @description Retrieves node from resulting graph +#' @rdname node-methods +#' @export +#' +#' @param x GNG object +#' @param gng_id Id of the node to retrieve. This is the id returned by functions like predict, or centroids +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' print(node(gng, 10)$pos) +#' +#' @aliases node +#' +node <- function(x, gng_id) UseMethod("node") + +#' Predict +#' @name predict.gng +#' @title predict +#' @description Retrieves prediction from trained GNG model +#' @rdname predict.gng +#' @export +#' +#' @param object Trained model +#' @param x Vector or matrix of examples +#' @param ... other arguments not used by this method +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' predict(gng, c(1,2,2)) +predict.Rcpp_GNGServer <- function(object, x, ...){ + if( is.vector(x)){ + object$.predict(x) + }else{ + if ( !is(x, "data.frame") && !is(x, "matrix") && !is(x,"numeric") ) { + stop(gmum.error(GMUM_WRONG_PARAMS, "Wrong target class, please provide data.frame, matrix or numeric vector")) + } + + if (!is(x, "matrix")) { + x <- data.matrix(x) + } + + y <- rep(NA, nrow(x)) + + for(i in 1:nrow(x)){ + y[i] <- object$.predict(x[i,]) + } + + y + } +} + +#' @export +node.Rcpp_GNGServer <- NULL + +#' @name run +#' @title run +#' @rdname run-methods +#' @description Run algorithm (in parallel) +#' @export +#' +#' @param object GNG object +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' run(gng) +#' print(isRunning(gng)) +#' +run <- function(object) UseMethod("run") + +#' @export +run.Rcpp_GNGServer <- NULL + +#' @title pause +#' @description Pause algorithm +#' @export +#' +#' @param object GNG object +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' pause(gng) +#' print(isRunning(gng)) +pause <- function(object) UseMethod("pause") + +#' @export +pause.Rcpp_GNGServer <- NULL + +#' @title terminate +#' @name terminate +#' @description Terminate algorithm +#' @export +#' @rdname terminate-methods +#' @docType methods +#' +#' @param object GNG object +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' terminate(gng) +#' +#' @aliases terminate +#' +terminate <- function(object) UseMethod("terminate") + +#' @export +terminate.Rcpp_GNGServer <- NULL + +#' @title meanError +#' @description Gets mean error of the graph (note: blocks the execution, O(n)) +#' @param object GNG object +#' +#' @export +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' meanError(gng) +meanError <- NULL + +#' @title errorStatistics +#' @description Gets vector with errors for every second of execution +#' @export +#' +#' @param object GNG object +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' errorStatistics(gng) +errorStatistics <- NULL + +#' @title Constructor of Optimized GrowingNeuralGas object. +#' @rdname optimized-gng +#' +#' @export +#' +#' @description Construct simplified and optimized GNG object. Can be used to train offline, or online. 
Data dimensionality shouldn't be too big, if +#' it is consider using dimensionality reduction techniques. +#' +#' @param beta Decrease the error variables of all node +#' nodes by this fraction (forgetting rate). Default 0.99 +#' +#' @param alpha Decrease the error variables of the nodes neighboring to +#' the newly inserted node by this fraction. Default 0.5 +#' +#' @param lambda New vertex is added every lambda iterations. Default 200 +#' +#' @param max.nodes Maximum number of nodes +#' (after reaching this size it will continue running, but new noes won't be added) +#' +#' @param eps.n Strength of adaptation of neighbour node. Default \code{0.0006} +#' +#' @param eps.w Strength of adaptation of winning node. Default \code{0.05} +#' +#' @param max.iter If training offline will stop if exceedes max.iter iterations. Default \code{200} +#' +#' @param train.online If used will run in online fashion. Default \code{FALSE} +#' +#' @param min.improvement Used for offline (default) training. +#' Controls stopping criterion, decrease if training stops too early. Default \code{1e-3} +#' +#' @param dim Used for training online, specifies dataset example dimensionality +#' +#' @param value.range All example features should be in this range, required for optimized version of the algorithm. Default \code{(0,1)} +#' +#' @param x Passed data (matrix of data.frame) for offline training +#' +#' @param labels Every example can be associated with labels that are added to nodes later. By default empty +#' +#' @param max.edge.age Maximum edge age. Decrease to increase speed of change of graph topology. Default \code{200} +#' +#' @param verbosity How verbose should the process be, as integer from \eqn{[0,6]}, default: \code{0} +#' +#' @param seed Seed for internal randomization +#' +#' @examples +#' \dontrun{ +#' # Train online optimizedGNG. All values in this dataset are in the range (-4.3, 4.3) +#' X <- gng.preset.sphere(100) +#' gng <- OptimizedGNG(train.online = TRUE, value.range=c(min(X), max(X)), dim=3, max.nodes=20) +#' insertExamples(gng, X) +#' run(gng) +#' Sys.sleep(10) +#' pause(gng) +#' } +OptimizedGNG <- NULL + +#' @name clustering +#' @title clustering +#' +#' @description Gets vector with node indexes assigned to examples in the dataset +#' +#' @method clustering Rcpp_GNGServer +#' @export +#' +#' @rdname clustering-methods +#' +#' @docType methods +#' +#' @examples +#' gng <- GNG(gng.preset.sphere(100)) +#' clustering(gng) +clustering.Rcpp_GNGServer <- NULL + +#' @title Constructor of GrowingNeuralGas object. +#' +#' @rdname gng +#' +#' @export +#' +#' @description Construct GNG object. Can be used to train offline, or online. +#' +#' @param beta Decrease the error variables of all node +#' nodes by this fraction (forgetting rate). Default 0.99 +#' +#' @param alpha Decrease the error variables of the nodes neighboring to +#' the newly inserted node by this fraction. Default 0.5 +#' +#' @param lambda Every lambda iteration is added new vertex. Default 200 +#' +#' @param max.nodes Maximum number of nodes +#' (after reaching this size it will continue running, but won't add new nodes) +#' +#' @param eps.n How strongly adapt neighbour node. Default \code{0.0006} +#' +#' @param eps.w How strongly adapt winning node. Default \code{0.05} +#' +#' @param max.iter Uf training offline will stop if exceedes max.iter iterations. Default \code{200} +#' +#' @param train.online default FALSE. If used will run in online fashion +#' +#' @param min.improvement Used for offline (default) training. 
+#' Controls stopping criterion, decrease if training stops too early. Default \code{1e-3} +#' +#' @param dim Used for training online, specifies training example size +#' +#' @param k Utility constant, by default turned off. Good value is 1.3. Constant controlling speed of erasing obsolete nodes, +#' see \url{http://sund.de/netze/applets/gng/full/tex/DemoGNG/node20.html} +#' +#' @param x Passed data (matrix of data.frame) for offline training +#' +#' @param labels Every example can be associated with labels that are added to nodes later. By default empty +#' +#' @param max.edge.age Maximum edge age. Decrease to increase speed of change of graph topology. Default \code{200} +#' +#' @param verbosity How verbose should the process be, as integer from \eqn{[0,6]}, default: \code{0} +#' +#' @param seed Seed for internal randomization +#' +#' @examples +#' \dontrun{ +#' X <- gng.preset.sphere(100) +#' y <- round(runif(100)) +#' # Train in an offline manner +#' gng <- GNG(X, labels=y, max.nodes=20) +#' # Plot +#' plot(gng) +#' +#' # Train in an online manner with utility (erasing obsolete nodes) +#' gng <- GNG(max.nodes=20, train.online=TRUE, k=1.3, dim=3) +#' insertExamples(gng, X, labels=y) +#' run(gng) +#' Sys.sleep(10) +#' terminate(gng) +#' # Plot +#' plot(gng) +#' } +GNG <- NULL + +#' @title convertToIGraph +#' @description Converts GNG to igraph object, where every vertex contains attributes gng.index, error, data.label and 3 first spatial coordinates (as attributes v0, v1, v2). +#' Additionally utility attribute is present if utility GNG is used. +#' +#' @param object GNG object +#' @param calculate.dist If true will calculate all \code{n^2} distances in the graph +#' +#' @export +convertToIGraph <- NULL + +#' @title numberNodes +#' @description Get current number of nodes in the graph +#' +#' @param object GNG object +#' +#' @export +numberNodes <- function(object){ + object$getNumberNodes() +} + +#' @name insertExamples +#' @title insertExamples +#' @description Insert examples with optional labels. +#' +#' @export +#' +#' @param object GNG object +#' @param examples \code{matrix} or \code{data.frame} with rows as examples. Note: if training online make sure +#' number of columns matches dim parameter passed to GNG constructor. +#' @param labels \code{vector} of labels, that will be associated with nodes in the graph. GNG will assign to each +#' node a mean of labels of closest examples. +#' +#' @examples +#' X <- gng.preset.sphere(100) +#' gng <- GNG(X, train.online=TRUE) +#' # Add more examples +#' X = gng.preset.sphere(100) +#' insertExamples(gng, X) +#' +#' @note It copies your examples twice in RAM. You might want to use object$.insertExamples. +insertExamples <- NULL + +.GNG <- function(x=NULL, labels=c(), + beta=0.99, + alpha=0.5, + max.nodes=100, + eps.n=0.0006, + eps.w= 0.05, + max.edge.age = 200, + type = gng.type.default(), + max.iter=200, + train.online=FALSE, + min.improvement=1e-3, + lambda=200, + dim=-1, + verbosity=0, + seed=-1 +){ + + + config <- new(GNGConfiguration) + + config$seed = seed + + if(is.data.frame(x)){ + warning("Converting data.frame to matrix. 
Please make sure you pass only numerics to GNG.") + x <- as.matrix(x) + } + + # Fill in configuration + if(train.online){ + if(is.null(x)){ + if (dim == -1) { + stop(gmum.error(GMUM_WRONG_PARAMS, "To train online, please pass desired dimensionality in dim parameter")) + } + config$dim = dim + }else{ + config$dim = ncol(x) + } + config$max_iter = -1 + }else{ + config$dim = ncol(x) + config$max_iter = max.iter + } + + if(type[1] == gng.type.optimized()[1]){ + config$.uniformgrid_optimization = TRUE + config$.lazyheap_optimization = TRUE + config$.set_bounding_box(type[2], type[3]) + + if(!train.online){ + if(!max(x) <= type[3] && !min(x) >= type[2]){ + stop(gmum.error(GMUM_WRONG_PARAMS, "Passed incorrect parameters. The dataset is not in the defined range")) + } + } + + }else{ + config$.uniformgrid_optimization = FALSE + config$.lazyheap_optimization = FALSE + } + + if(type[1] == gng.type.utility()[1]){ + config$.experimental_utility_k = type[2] + config$.experimental_utility_option = 1 + } + else{ + config$.experimental_utility_option = 0 + } + + + config$.dataset_type=.gng.dataset.bagging + config$beta = beta + config$max_edge_age = max.edge.age + config$alpha = alpha + config$max_nodes = max.nodes + config$eps_n = eps.n + config$eps_w = eps.w + + config$lambda = lambda + config$verbosity = verbosity + + if(!config$.check_correctness()){ + stop(gmum.error(GMUM_WRONG_PARAMS, "Passed incorrect parameters.")) + } + + # Construct server + server = new(GNGServer, config) + + + if(train.online){ + if(!is.null(x)){ + insertExamples(server, x, labels) + run(server) + } + } + if(!train.online){ + + print("Training offline") + if(is.null(x)){ + stop(gmum.error(GMUM_ERROR, "Passed null data and requested training offline")) + }else{ + insertExamples(server, x, labels) + run(server) + + max_iter = max.iter + min_relative_dif = min.improvement + iter = 0 + previous_iter = -1 + best_so_far = 1e10 + initial_patience = 3 + error_index = -1 # always bigger than 0 + patience = initial_patience + + tryCatch({ + + while(server$getCurrentIteration() == 0 || server$.isRunning()){} + + # max_iter is checked in GNG + while(iter == 0 || server$.isRunning()){ + Sys.sleep(0.1) + iter = server$getCurrentIteration() + + if(previous_iter != iter && iter %% (max_iter/100) == 0){ + print(paste("Iteration", iter)) + } + + + + if(length(server$getErrorStatistics()) > 5){ + errors = server$getErrorStatistics() + + best_previously = min(errors[(length(errors)-5):length(errors)]) + + # this is same as (best_so_far-best_previously)/best_so_far < min_relative_di + # we get minimum of window 5 and look at the history + if( (error_index - server$.getGNGErrorIndex()) > 4 && + (best_so_far - best_previously) < best_so_far*min_relative_dif){ + patience = patience - 1 + if(patience <= 0){ + print(sprintf("Best error during training: %f", best_so_far)) + print(sprintf("Best error in 5 previous iterations %f", best_previously)) + print(errors[(length(errors)-5):length(errors)]) + print("Patience (which you can control) elapsed, bailing out") + break + } + }else{ + patience = initial_patience + } + + + error_index = server$.getGNGErrorIndex() + best_so_far = min(best_previously, best_so_far) + } + + } + + print(paste("Iteration", iter)) + previous_iter = iter + + + + if(server$.isRunning()){ + terminate(server) + } + + server$.updateClustering() + + }, interrupt= + function(interrupt){ + if(server$.isRunning()){ + terminate(server) + } + + }) + + } + }else{ + } + + + + server +} + + +GNG <- function(x=NULL, labels=c(), + beta=0.99, + 
alpha=0.5, + max.nodes=1000, + eps.n=0.0006, + eps.w=0.05, + max.edge.age=200, + train.online=FALSE, + max.iter=200, + dim=-1, + min.improvement=1e-3, + lambda=200, + verbosity=0, + seed=-1, + k=NULL +){ + gng <- NULL + call <- match.call(expand.dots = TRUE) + if(is.null(k)){ + gng <- .GNG(x=x, labels=labels, beta=beta, alpha=alpha, max.nodes=max.nodes, + eps.n=eps.n, eps.w=eps.w, max.edge.age=max.edge.age, type=gng.type.default(), train.online=train.online, max.iter=max.iter, dim=dim, min.improvement=min.improvement, lambda=lambda, verbosity=verbosity, seed=seed) + }else{ + gng <- .GNG(x=x, labels=labels, beta=beta, alpha=alpha, max.nodes=max.nodes, + eps.n=eps.n, eps.w=eps.w, max.edge.age=max.edge.age, type=gng.type.utility(k=k), train.online=train.online, max.iter=max.iter, dim=dim, min.improvement=min.improvement, lambda=lambda, verbosity=verbosity, seed=seed) + } + assign("call", call, gng) + gng +} + +OptimizedGNG <- function(x=NULL, labels=c(), + beta=0.99, + alpha=0.5, + max.nodes=1000, + eps.n=0.0006, + eps.w= 0.05, + max.edge.age = 200, + train.online=FALSE, + max.iter=200, + dim=0, + min.improvement=1e-3, + lambda=200, + verbosity=0, + seed=-1, + value.range=c(0,1) +){ + if(value.range[1] >= value.range[2]){ + stop(gmum.error(GMUM_ERROR, "Incorrect range")) + return() + } + call <- match.call(expand.dots = TRUE) + gng <- .GNG(x=x, labels=labels, beta=beta, alpha=alpha, max.nodes=max.nodes, + eps.n=eps.n, eps.w=eps.w, max.edge.age=max.edge.age, type=gng.type.optimized(minimum=value.range[1], maximum=value.range[2]), train.online=train.online, max.iter=max.iter, dim=dim, min.improvement=min.improvement, lambda=lambda, verbosity=verbosity, seed=seed) + assign("call", call, gng) + gng +} + +predictComponent <- function(object, x){ + tryCatch(if(is.null(object$components.membership)){ + assign("components.membership", clusters(convertToIGraph(object))$membership, object) + }, error=function(...) + assign("components.membership", clusters(convertToIGraph(object))$membership, object)) + + object$components.membership[predict(object, x)] +} + +plot.Rcpp_GNGServer <- function(x, vertex.color=gng.plot.color.cluster, + layout=layout.fruchterman.reingold, mode=gng.plot.2d, + vertex.size=3, ...){ + if(vertex.size <= 0){ + stop("Please pass positivie vertex.size") + } + + if(!(is.list(vertex.color) || is.vector(vertex.color) || vertex.color %in% c(gng.plot.color.cluster, + gng.plot.color.fast.cluster, gng.plot.color.label, gng.plot.color.none))){ + stop("Please pass correct vertex.color") + } + + + if(x$getNumberNodes() > 4000){ + warning("Trying to plot very large graph (>4000 nodes). It might take a long time especially if using layout function.") + } + + if(x$getNumberNodes() == 0){ + warning("Empty graph") + return() + } + + if(mode == gng.plot.2d){ + .gng.plot2d(x, vertex.color, layout, vertex.size=vertex.size) + } + else if(mode == gng.plot.2d.errors){ + .gng.plot2d.errors(x, vertex.color, layout, vertex.size=vertex.size) + }else{ + stop("Unrecognized mode") + } +} + + +node.Rcpp_GNGServer <- function(x, gng_id){ + x$getNode(gng_id) +} + +run.Rcpp_GNGServer <- function(object){ + # Invalidate components + assign("components.membership", NULL, object) + object$.run() +} + +pause.Rcpp_GNGServer <- function(object){ + object$.pause() + n = 0.0 + sleep = 0.1 + while(object$.isRunning()){ + Sys.sleep(sleep) + n = n + 1 + if(n > 2/sleep){ + print("Warning: GNG has not paused! Check status with gng$.isRunning(). 
Something is wrong.") + return() + } + } +} + +terminate.Rcpp_GNGServer <- function(object){ + object$.terminate() +} + +meanError <- function(object){ + object$getMeanError() +} + +errorStatistics <- function(object){ + object$getErrorStatistics() +} + +clustering.Rcpp_GNGServer <- function(c){ + c$getClustering() +} + +gngSave <- function(object, filename){ + warning("Saving does not preserve training history") + object$.save(filename) +} + +gngLoad <- function(filename){ + warning("Saving does not preserve training history") + fromFileGNG(filename) +} + +calculateCentroids <- function(object, community.detection.algorithm=spinglass.community){ + ig <- convertToIGraph(object) + + cl = clusters(ig) + components = lapply(levels(as.factor(cl$membership)), function(x) induced.subgraph(ig, cl$membership==as.numeric(x))) + + centroids <- c() + for(cc in components){ + communities <- community.detection.algorithm(cc) + for(i in 1:length(communities)){ + #Get subcommunity + community_graph <- induced.subgraph(cc, which(membership(communities)==i)) + #Get index of centroid (which is ordered by betwenness) + centroid_index = which(order(betweenness(community_graph))==1) + # Append + centroids<- c(centroids, V(community_graph)$gng.index[centroid_index]) + } + } + centroids +} + + +convertToIGraph <- function(object, calculate.dist=TRUE){ + was_running = object$.isRunning() + if(was_running){ + pause(object) + } + + if(object$getNumberNodes() == 0){ + return(graph.empty(n=0, directed=FALSE)) + } + + #Prepare index map. Rarely there is a difference in indexing + #due to a hole in memory representation of GNG graph (i.e. + #indexing in gng can be non-continuous) + + # Warning: This is a hack. If there is a bug look for it here + indexesGNGToIGraph <- 1:(2*object$.getLastNodeIndex()) + indexesIGraphToGNG <- 1:object$getNumberNodes() + + if(object$.getLastNodeIndex() != object$getNumberNodes()){ + igraph_index = 1 + for(i in (1:object$.getLastNodeIndex())){ + node <- node(object, i) + if(length(node) != 0){ + indexesGNGToIGraph[i] = igraph_index + indexesIGraphToGNG[igraph_index] = i + igraph_index = igraph_index + 1 + } + } + } + + adjlist<-list() + for(i in 1:object$.getLastNodeIndex()){ + node <- node(object, i) + if(length(node) != 0){ + + igraph_index = indexesGNGToIGraph[i] + #print(paste(igraph_index, node$neighbours)) + neighbours = node$neighbours[node$neighbours > i] + adjlist[[igraph_index]] <- sapply(neighbours, function(x){ indexesGNGToIGraph[x] }) + } else{ + print("Empty node") + } + } + + + g <- graph.adjlist(adjlist, mode = "all", duplicate=FALSE) + for(i in 1:object$.getLastNodeIndex()){ + node <- node(object, i) + if(length(node) != 0){ + igraph_index = indexesGNGToIGraph[i] + #TODO: it is more efficient to assign whole vectors + #TODO: refactor in whole code v0 v1 v2 to pos_1 pos_2 pos_3 + V(g)[igraph_index]$v0 <- node$pos[1] + V(g)[igraph_index]$v1 <- node$pos[2] + V(g)[igraph_index]$v2 <- node$pos[3] + V(g)[igraph_index]$label <- node$index + V(g)[igraph_index]$data.label <- node$label + V(g)[igraph_index]$error <- node$error + V(g)[igraph_index]$gng.index <- node$index + if(!is.null(node$utility)){ + V(g)[igraph_index]$utility = node$utility + } + } + } + + if(calculate.dist){ + # Add distance information + dists <- apply(get.edges(g, E(g)), 1, function(x){ + object$.nodeDistance(indexesIGraphToGNG[x[1]], indexesIGraphToGNG[x[2]]) + }) + E(g)$dists = dists + } + + if(was_running){ + run(object) + } + + g +} + +findClosests <- function(object, node.ids, x){ + .findClosests <- 
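+    # inner helper, shared by the single-vector branch and the row-by-row
+    # matrix branch below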
function(object, node.ids, x){ + # Returns all dists from given pos to given nodes + get_all_dists <- function(pos, nodes, gng){ + sapply(nodes, function(node_index) sqrt(sum((pos-node(gng, node_index)$pos)^2))) + } + + which.min(get_all_dists(x, node.ids, object)) + } + if( is.vector(x)){ + .findClosests(object, node.ids, x) + }else{ + if ( !is(x, "data.frame") && !is(x, "matrix") && !is(x,"numeric") ) { + stop(gmum.error(GMUM_WRONG_PARAMS, "Wrong target class, please provide data.frame, matrix or numeric vector")) + } + + if (!is(x, "matrix")) { + x <- data.matrix(x) + } + + y <- rep(NA, nrow(x)) + + for(i in 1:nrow(x)){ + y[i] <- .findClosests(object, node.ids, x[i,]) + } + + y + } +} + +insertExamples <- function(object, examples, labels=c()){ + if(length(labels) == 0){ + object$.insertExamples(examples) + }else if(typeof(labels) == "character"){ + if(typeof(labels) == "list"){ + if(is.null(examples$labels)){ + stop(gmum.error(GMUM_WRONG_PARAMS, "Empty labels column")) + }else{ + label.column <- examples$labels + examples$labels <- NULL + object$.insertLabeledExamples(examples, label.column) + } + }else{ + stop(gmum.error(GMUM_WRONG_PARAMS, "Please pass data frame")) + } + }else{ + object$.insertLabeledExamples(examples, labels) + } +} + +loadModule('gng_module', TRUE) + +#' Class Rcpp_GNGServer. +#' +#' Class \code{Rcpp_GNGServer} defines a GNGServer class. +#' +#' @name Rcpp_GNGServer-class +#' @exportClass Rcpp_GNGServer +setClass(Class = "Rcpp_GNGServer") + +# Lazy loading to allow for discovery of all files +evalqOnLoad( { + .wine <<- NULL +}) + diff --git a/R/gng.presets.R b/R/gng.presets.R new file mode 100644 index 00000000..4f4fc0c8 --- /dev/null +++ b/R/gng.presets.R @@ -0,0 +1,154 @@ +#' Cube preset dataset +#' +#' @title gng.preset.cube +#' @description Generate sample cube dataset +#' @export + +#' @param N Number of points +#' @param r Length of the side of cube +#' @param center Center of the plane +#' +#' @examples +#' X <- gng.preset.cube(100) +#' gng <- GNG(X) +#' +gng.preset.cube<-function(N, r=0.5, center=c(0.5,0.5,0.5)){ + .gng.box_point<-function(r, center, prob=-1){ + point <- c() + + if(prob == -1) + point<-center + else + point<-c(center, prob) + + point[1:3] = point[1:3] + runif(3, min=-r/2.0, max=r/2.0) + + point +} + + + mat<-matrix(0,N,3) + + for(i in 1:N){ + mat[i,] = .gng.box_point(r=r, center=center) + } + + mat +} + + +#' Plane preset dataset +#' +#' @title gng.preset.plane +#' @description Generate sample plane dataset +#' @export + +#' @param N Number of points +#' @param side Length of the side of plane +#' @param center Center of the plane +#' +#' @examples +#' X <- gng.preset.plane(100) +#' gng <- GNG(X) +#' +gng.preset.plane<-function(N, side=0.5, center=c(0.5,0.5,0.5)){ +.gng.plane_point<-function(r,center){ + if(!hasArg(r)) r<-1.0 + if(!hasArg(center)) center<-c(0,0,0) + + point<-center + + point[1]<-point[1]+r*runif(1.0) + point[2]<-point[2]+r*runif(1.0) + point[3]<-point[3] + + return(point) +} + mat<-matrix(0,N,3) + + for(i in 1:N){ + mat[i,] = .gng.plane_point(side, center) + mat[i,3] = mat[i,1] + } + + mat +} + + +#' Sphere preset dataset +#' +#' @title gng.preset.sphere +#' @description Generate sample sphere dataset +#' @export + +#' @param N Number of points +#' @param r Radius of the sphere +#' @param center Center of the sphere +#' +#' @examples +#' X <- gng.preset.sphere(100) +#' gng <- GNG(X) +gng.preset.sphere<-function(N, r=0.5, center=c(0.5,0.5,0.5)){ +.gng.sphere_point<-function(r,center){ + if(!hasArg(r)) r<-1.0 + 
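+  # Note: drawing both angles uniformly concentrates points near the poles;
+  # good enough for a demo dataset, though not an area-uniform sphere sample.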
if(!hasArg(center)) center<-c(0,0,0) + + alpha<-runif(1)*2*pi + beta<-runif(1)*pi + + point<-center + + point[1]<-point[1]+r*cos(alpha)*sin(beta) + point[2]<-point[2]+r*sin(alpha)*sin(beta) + point[3]<-point[3]+r*cos(beta) + + return(point) +} + + + + mat<-matrix(0,N,3) + + for(i in 1:N){ + mat[i,] = .gng.sphere_point(r, center) + } + + mat +} + + +.sigmoid <- function(x){ + 1./(1.+exp(-x)) +} + +gng.preset_potential<-function(N, r=0.5, center=c(0.5,0.5,0.5), prob=-1){ + + mat <- c() + if(prob == -1){ + mat <- matrix(rnorm(20,mean=1), N,3) + } + else{ + mat <- matrix(rnorm(20,mean=1), N,4) + } + + + for(j in 1:N){ + t<-rnorm(1,mean=0,sd=1) + u<-rnorm(1,mean=0,sd=1) + val<-.sigmoid(t^2+u^2); + mat[j,1] = t + mat[j,2] = u + mat[j,3] = val + if(prob!=-1) mat[j,4] = prob + + } + + mat +} + + + + + + + diff --git a/R/gng.utils.R b/R/gng.utils.R new file mode 100644 index 00000000..defe1dbe --- /dev/null +++ b/R/gng.utils.R @@ -0,0 +1,127 @@ +#' @importFrom httr GET content + +#' @export +#' @rdname print.gng +#' @method print Rcpp_GNGServer +#' +#' @title print +#' +#' @description Print basic information about GNG object +#' +#' @docType methods +#' +#' @param x GNG object model. +#' @param ... other arguments not used by this method. +print.Rcpp_GNGServer <- NULL + +#' Summary of GNG object +#' @export +#' @rdname summary.gng +#' @method summary Rcpp_GNGServer +#' +#' @title summary +#' +#' @description Print basic information about GNG object +#' +#' @docType methods +#' +#' @param object GNG object model. +#' @param ... other arguments not used by this method. +summary.Rcpp_GNGServer <- NULL + + +print.Rcpp_GNGServer <- function(x, ...){ + print(sprintf("Growing Neural Gas, %d nodes with mean error %f", + x$getNumberNodes(), x$getMeanError())) +} + +summary.Rcpp_GNGServer <- function(object, ...){ + if(object$.getConfiguration()$.uniformgrid_optimization){ + print("(Optimized) Growing Neural Gas") + }else{ + print("Growing Neural Gas") + } + if(exists("object$call")){ + print(object$call) + } + if(object$hasStarted()){ + print(sprintf("%d nodes with mean error %f", + object$getNumberNodes(), object$getMeanError())) + + print(sprintf("Trained %d iterations", object$getCurrentIteration())) + print("Mean errors[s]: ") + errors = object$getErrorStatistics() + if(length(errors) > 10){ + errors = errors[(length(errors)-10):length(errors)] + } + + print(errors) + } +} + +show.Rcpp_GNGServer <- function(object) { + summary(object) +} + +setMethod("show", "Rcpp_GNGServer", show.Rcpp_GNGServer) + +#' Retrieves wine dataset design matrix from UCI repository +#' +#' @title get.wine.dataset.X +#' +#' @param scale if TRUE will perform feature scaling +#' +#' @export +get.wine.dataset.X <- function(scale=TRUE){ + if(!exists(".wine") || is.null(.wine)) { + a <- GET("https://archive.ics.uci.edu/ml/machine-learning-databases/wine/wine.data") + .wine <<- read.csv(textConnection(content(a)), header=F) + } + + if(scale) { + return(as.matrix(scale(.wine[-1]))) + } else { + return(.wine[-1]) + } +} + +#' Retrieves wine dataset labels from UCI repository +#' +#' @title get.wine.dataset.y +#' +#' @export +get.wine.dataset.y <- function(){ + # Hack for R CMD check. 
Note that it is cleaner to assign (see predictComponent) + if(!exists(".wine") || is.null(.wine)) { + a <- GET("https://archive.ics.uci.edu/ml/machine-learning-databases/wine/wine.data") + .wine <<- read.csv(textConnection(content(a)), header=F) + } + return(.wine[,1]) +} + +.plane.point<-function(r,center){ + if(!hasArg(r)) r<-1.0 + if(!hasArg(center)) center<-c(0,0,0) + + point<-center + point[1]<-point[1]+r*runif(1.0) + point[2]<-point[2]+r*runif(1.0) + point[3]<-point[3] + + return(point) +} + +.sphere.point<-function(r,center){ + if(!hasArg(r)) r<-1.0 + if(!hasArg(center)) center<-c(0,0,0) + + alpha<-runif(1)*2*pi + beta<-runif(1)*pi + + point<-center + point[1]<-point[1]+r*cos(alpha)*sin(beta) + point[2]<-point[2]+r*sin(alpha)*sin(beta) + point[3]<-point[3]+r*cos(beta) + + return(point) +} diff --git a/R/gng.visualize.R b/R/gng.visualize.R new file mode 100644 index 00000000..b69b1ff3 --- /dev/null +++ b/R/gng.visualize.R @@ -0,0 +1,81 @@ + +.gng.plot2d.errors<-function(gngServer, vertex.color, layout, vertex.size=3){ + ig <- convertToIGraph(gngServer) + + if(length(V(ig))==0){ + return() + } + + if(vertex.color == 'label'){ + vertex.color = c(1:length(V(ig))) + max_col = 0 + for(label in V(ig)$label) + max_col = max(max_col, round(label)) + cols = rainbow(max_col+1) + vertex.color = cols[as.double(lapply(V(ig)$label, round))] + } + + if(vertex.color == 'component'){ + vertex.color <- predictComponent(gngServer, ) + } + + .visualizeIGraph2dWithErrors(ig, vertex.color, layout, gngServer, vertex.size=3) +} + +.gng.plot2d<-function(gngServer, vertex.color, layout, vertex.size=3){ + ig <- convertToIGraph(gngServer) + + if(length(V(ig))==0){ + return() + } + + if(vertex.color == 'label'){ + vertex.color = c(1:length(V(ig))) + max_col = 0 + for(label in V(ig)$data.label) + max_col = max(max_col, round(label)) + cols = rainbow(max_col+1) + vertex.color = cols[as.double(lapply(V(ig)$data.label, round))] + } + + .visualizeIGraph2d(ig, vertex.color, layout, vertex.size=vertex.size) +} + +# Visualize igraph using igraph plot +# It will layout graph using v0 and v1 coordinates +# @note It is quite slow, works for graphs < 2000 nodes, and for graphs <400 when using layout +.visualizeIGraph2d<-function(g, vertex.color, layout, vertex.size=3){ + L<-layout(g) + if(vertex.color == 'cluster'){ + communities <- infomap.community(g) + communities + col <- rainbow(length(communities)) + vertex.color <- col[membership(communities)] + } + else if(vertex.color == 'fast_cluster'){ + l = fastgreedy.community(g)#as.undirected(g)) + col <- rainbow(length(l)) + print(membership(l)) + vertex.color <- col[membership(l)] + } + else if(vertex.color == 'none'){ + vertex.color = NA + }else{ + # Passed something else as vector + } + + plot.igraph(g,vertex.size=vertex.size,vertex.label=NA,vertex.color=vertex.color,layout=L) +} + +.visualizeIGraph2dWithErrors<-function(ig, vertex.color, layout_2d, gng,vertex.size=3){ + plot.new() + par(mfrow=c(1,2)) + .visualizeIGraph2d(ig, vertex.color, layout_2d,vertex.size=vertex.size) + title("Graph visualization") + errors_raw = gng$getErrorStatistics() + errors_raw = errors_raw[5:length(errors_raw)] + errors = errors_raw + #errors = log((errors_raw)/min(errors_raw+1e-4)) + plot(errors, type="l", lty=2, lwd=2, xlab="Batch", ylab="Mean batch error", frame.plot=F) + title("Mean error (log)") +} diff --git a/R/hello_gmum.R b/R/hello_gmum.R deleted file mode 100644 index 4b6210da..00000000 --- a/R/hello_gmum.R +++ /dev/null @@ -1,9 +0,0 @@ -#' Description -#' Hello gmum call! 
-#' Details -#' Prints out "Hello Gmum" -#' @export -hello_gmum <- function(){ - .Call( "hello_gmum", PACKAGE = "gmum.r" ) -} - diff --git a/R/misc.R b/R/misc.R new file mode 100644 index 00000000..ca8c3f8f --- /dev/null +++ b/R/misc.R @@ -0,0 +1,70 @@ +#' Simple mouse-shaped dataset +#' +#' @name cec.mouse1.spherical +#' +#' @title cec.mouse1.spherical +#' +#' @docType data +#' @keywords data +NULL + +#' Simple dataset consisting in data drawn from set of elliptical gausses +#' +#' @name cec.ellipsegauss +#' +#' @title cec.ellipsegauss +#' +#' @docType data +#' @keywords data +NULL + +#' Extra information for dataset cec.mouse1.extra (energy and cluster assignment) +#' +#' @name cec.mouse1.spherical.extra +#' +#' @title cec.mouse1.spherical.extra +#' +#' @docType data +#' @keywords data +NULL + +#' Extra information for dataset cec.ellipsegauss (energy and cluster assignment) +#' +#' @name cec.ellipsegauss.extra +#' +#' @title cec.ellipsegauss.extra +#' +#' @docType data +#' @keywords data +NULL + +#' Dataset used in transduction demo on website +#' +#' @name svm.transduction +#' +#' @title svm.transduction +#' +#' @docType data +#' @keywords data +NULL + +#' UCI breast cancer dataset +#' +#' @name svm.breastcancer.dataset +#' +#' @title svm.breastcancer.dataset +#' +#' @docType data +#' @keywords data +NULL + +#' Simple dataset in the form of T letter +#' +#' @name Tset +#' +#' @title Tset +#' +#' @docType data +#' @keywords data +NULL + diff --git a/R/svm.R b/R/svm.R new file mode 100644 index 00000000..a95a3973 --- /dev/null +++ b/R/svm.R @@ -0,0 +1,1220 @@ +library(ggplot2) + +#' @title Create SVM object +#' @rdname svm +#' @export +#' +#' @description Create and train SVM model object. +#' +#' @param x Training data without labels in one of the following formats: +#' \code{data.frame}, \code{data.matrix}, \code{SparseM::matrix.csr}, \code{Matrix::Matrix}, +#' \code{slam::simple_triplet_matrix} +#' @param y Labels in one of the followinf formts: \code{factor}, \code{vector}. +#' Recommended type is \code{factor} +#' @param data Can be passed instead of \code{x,} \code{y} pair with \code{formula} to mark the labels +#' column, supported formats are: +#' \code{data.frame}, \code{data.matrix} +#' @param formula Can be passed with \code{data} instead of \code{x}, \code{y} pair, +#' formula needs to point to lables column, for example: \code{target~.} +#' @param core Support Vector Machine library to use in traning, available are: +#' \code{'libsvm'}, \code{'svmlight'}; default: \code{'libsvm'} +#' @param kernel Kernel type as string, available are: \code{'linear'}, \code{'poly'}, +#' \code{'rbf'}, \code{'sigmoid'}; +#' default: \code{'linear'} +#' \itemize{ +#' \item \code{linear}: \eqn{x'*w} +#' \item \code{poly}: \eqn{(gamma*x'*w + coef0)^{degree}} +#' \item \code{rbf}: \eqn{exp(-gamma*|x-w|^2)} +#' \item \code{sigmoid}: \eqn{tanh(gamma*x'*w + coef0)} +#' } +#' @param prep Preprocess method as string, available are: \code{'none'}, \code{'2e'}; +#' default: \code{'none'}. 
For more information on \code{2eSVM} see:
+#' \url{http://www.sciencedirect.com/science/article/pii/S0957417414004138}
+#' @param C Cost/complexity parameter, default: \code{1}
+#' @param gamma Parameter for \code{poly}, \code{rbf} and \code{sigmoid} kernels,
+#' default: \code{1/n_features}
+#' @param coef0 For \code{poly} and \code{sigmoid} kernels, default: \code{0}
+#' @param degree For \code{poly} kernel, default: \code{3}
+#' @param cache_size Cache memory size in MB, default: \code{100}
+#' @param tol Tolerance of termination criterion, default: \code{1e-3}
+#' @param max.iter Depending on library:
+#' \itemize{
+#' \item libsvm: number of iterations after which the training process is stopped
+#' (it can end earlier if the desired tolerance is met), default: \code{1e6}
+#' \item svmlight: number of iterations without progress after which training is stopped,
+#' default: \code{-1} (no limit)
+#' }
+#' @param transductive.learning Option for the SVM model to deduce missing labels from the dataset,
+#' default: \code{FALSE}
+#' NOTE: this feature is only available with the svmlight library; missing labels are marked as
+#' \code{'TR'}. If none are found and transductive.learning is \code{TRUE}, label \code{0} will be
+#' interpreted as missing
+#' @param transductive.posratio Fraction of unlabeled examples to be classified into the positive class
+#' as float from \eqn{[0,1]}, default: the ratio of positive and negative examples in the training data
+#' @param class.weights Named vector with a weight for each class, default: \code{NULL}
+#' @param example.weights Vector of the same length as the training data with a weight for each training example,
+#' default: \code{NULL} NOTE: this feature is only supported with the svmlight library
+#' @param class.type Multiclass algorithm type as string,
+#' available are: \code{'one.versus.all', 'one.versus.one'}; default: \code{'one.versus.all'}
+#' @param verbosity How verbose the process should be, as integer from \eqn{[0,6]}, default: \code{4}
+#' @param svm.options Allows passing additional svmlight command line arguments for more advanced options,
+#' for details see \url{http://svmlight.joachims.org/}
+#' @param ... other arguments not used by this method.
+#'
+#' @return SVM model object
+#' @examples
+#' \dontrun{
+#' # train SVM from data in x and labels in y
+#' svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+#'
+#' # train SVM using a dataset with both data and labels and a formula pointing to labels
+#' formula <- target ~ .
+#' svm <- SVM(formula, data, core="svmlight", kernel="rbf", gamma=1e3)
+#'
+#' # train a model with the 2eSVM algorithm
+#' data(svm_breast_cancer_dataset)
+#' ds <- svm.breastcancer.dataset
+#' svm.2e <- SVM(x=ds[,-1], y=ds[,1], core="libsvm", kernel="linear", prep = "2e", C=10);
+#' # more at \url{http://r.gmum.net/samples/svm.2e.html}
+#'
+#' # train SVM on a multiclass data set
+#' data(iris)
+#' # with "one vs rest" strategy
+#' svm.ova <- SVM(Species ~ ., data=iris, class.type="one.versus.all", verbosity=0)
+#' # or with "one vs one" strategy
+#' svm.ovo <- SVM(x=iris[,1:4], y=iris[,5], class.type="one.versus.one", verbosity=0)
+#'
+#' # we can use svmlight's sample weighting feature; suppose we have a weights vector
+#' # with a weight for every sample in the training data
+#' weighted.svm <- SVM(formula=y~., data=df, core="svmlight", kernel="rbf", C=1.0,
+#'                     gamma=0.5, example.weights=weights)
+#'
+#' # svmlight allows us to determine missing labels from a dataset;
+#' # suppose we have labels y with missing labels marked as zeros
+#' svm.transduction <- SVM(x, y, transductive.learning=TRUE, core="svmlight")
+#'
+#' # for more in-depth examples visit \url{http://r.gmum.net/getting_started.html}
+#' }
+SVM <- function(x, ...) UseMethod("SVM")
+
+#' Class Rcpp_SVMClient.
+#'
+#' Class \code{Rcpp_SVMClient} defines an SVM model class.
+#'
+#' @rdname Rcpp_SVMClient-class
+#' @exportClass Rcpp_SVMClient
+setClass(Class = "Rcpp_SVMClient")
+
+#' Class MultiClassSVM
+#'
+#' Class \code{MultiClassSVM} defines a multiclass SVM model class.
+#'
+#' @name MultiClassSVM-class
+#' @exportClass MultiClassSVM
+setClass(Class = "MultiClassSVM")
+
+.createMultiClassSVM <- NULL
+
+#' @export
+summary.MultiClassSVM <- NULL
+
+#' @export
+plot.MultiClassSVM <- NULL
+
+#' @export
+predict.MultiClassSVM <- NULL
+
+#' @export
+print.MultiClassSVM <- NULL
+
+#' @title Predict using SVM object
+#' @rdname predict.svm
+#'
+#' @description Returns predicted classes or distances to the discriminant for provided test examples.
+#'
+#' @export
+#'
+#' @param object Trained SVM object
+#' @param x_test Unlabeled data, in one of the following formats:
+#' \code{data.frame}, \code{data.matrix}, \code{SparseM::matrix.csr}, \code{Matrix::Matrix},
+#' \code{slam::simple_triplet_matrix}
+#' @param decision.function If \code{TRUE} returns the SVM's decision function
+#' (distance of a point from the discriminant) instead of predicted labels, default: \code{FALSE}
+#' @param ... other arguments not used by this method.
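+#' @return Factor with predicted labels or, when \code{decision.function=TRUE},
+#' a numeric vector with the decision function value for each example.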
+#'
+#' @method predict Rcpp_SVMClient
+#'
+#' @examples
+#' \dontrun{
+#' # firstly, SVM model needs to be trained
+#' svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+#' # then we can use it to predict unknown samples
+#' predict(svm, x_test)
+#' }
+predict.Rcpp_SVMClient <- NULL
+
+#' @title Plot SVM object
+#' @rdname plot.svm
+#'
+#' @description Plots the trained SVM's data and the model's discriminant
+#'
+#' @export
+#'
+#' @param x Trained SVM object
+#' @param X Optional new data points to be predicted and plotted in one of the following formats:
+#' \code{data.frame}, \code{data.matrix}; default: \code{NULL}
+#' @param mode Which plotting mode to use as string, available are:
+#' \itemize{
+#' \item \code{'normal'} - default mode, plots data in the cols argument and a linear decision
+#' boundary if available
+#' \item \code{'pca'} - performs PCA decomposition and draws data in the subspace of the first 2 dimensions
+#' from the PCA
+#' \item \code{'contour'} - contour plot for non-linear kernels
+#' }
+#' @param cols Data dimensions to be plotted as vector of length 2, default: \code{c(1,2)}
+#' @param radius Radius of the plotted data points as float, default: \code{3}
+#' @param radius.max Maximum radius of plotted data points when the model is trained
+#' with example weights, as float, default: \code{10}
+#' @param ... other arguments not used by this method.
+#'
+#' @examples
+#' \dontrun{
+#' # here we assume svm is a trained SVM model
+#' plot(svm)
+#' plot(svm, X=x, cols=c(1,3))
+#' plot(svm, mode="pca", radius=5)
+#' }
+#'
+#' @method plot Rcpp_SVMClient
+plot.Rcpp_SVMClient <- NULL
+
+#' @title Summary of SVM object
+#' @rdname summary.svm
+#'
+#' @description Prints short summary of a trained model.
+#'
+#' @export
+#' @param object Trained SVM object
+#' @param ... other arguments not used by this method.
+#'
+#' @method summary Rcpp_SVMClient
+#'
+summary.Rcpp_SVMClient <- NULL
+
+#' @title Print SVM object
+#' @rdname print.svm
+#'
+#' @description Prints short summary of a trained model.
+#'
+#' @export
+#' @param x Trained SVM object
+#' @param ... other arguments not used by this method.
+#' +#' @method print Rcpp_SVMClient +#' +print.Rcpp_SVMClient <- NULL + +#' @title Caret model representation for SVM with radial kernel +#' +#' @description Supply as parameter "method" in the caret::train function +#' +#' @format List of caret specific values +#' +#' @examples +#' \dontrun{ +#' model <- train(Class ~ ., data = training, +#' method = caret.gmumSvmRadial, +#' preProc = c("center", "scale"), +#' tuneLength = 8, +#' trControl = fitControl, +#' tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +#' core = "libsvm", # gmum.R parameter - pick library +#' verbosity = 0 # no outputs +#' ) +#' } +#' @export +caret.gmumSvmRadial <- NULL + +#' @title Caret model representation for SVM with linear kernel +#' +#' @description Supply as parameter "method" in the caret::train function +#' +#' @format List of caret specific values +#' +#' @examples +#' \dontrun{ +#' model <- train(Class ~ ., data = training, +#' method = caret.gmumSvmLinear, +#' preProc = c("center", "scale"), +#' tuneLength = 8, +#' trControl = fitControl, +#' tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +#' core = "libsvm", # gmum.R parameter - pick library +#' verbosity = 0 # no outputs +#' ) +#' } +#' @export +caret.gmumSvmLinear <- NULL + +#' @title Caret model representation for SVM with linear kernel +#' +#' @description Supply as parameter "method" in the caret::poly function +#' +#' @format List of caret specific values +#' +#' @examples +#' \dontrun{ +#' model <- train(Class ~ ., data = training, +#' method = caret.gmumSvmPoly, +#' preProc = c("center", "scale"), +#' tuneLength = 8, +#' trControl = fitControl, +#' tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +#' core = "libsvm", # gmum.R parameter - pick library +#' verbosity = 0 # no outputs +#' ) +#' } +#' +#' @export +caret.gmumSvmPoly <- NULL + +#' @export +#' @rdname svm +SVM.formula <- NULL + +#' @export +#' @rdname svm +SVM.default <- NULL + +loadModule('svm_wrapper', TRUE) + +SVM.formula <- function(formula, data, ...) { + call <- match.call(expand.dots = TRUE) + + if (!inherits(formula, "formula")) + stop("Please provide valid formula for this method.") + if (inherits(data, "Matrix") || + inherits(data, "simple_triplet_matrix") || + inherits(data, "matrix.csr")) + stop("Please provide dense data for this method") + + labels <- all.vars(update(formula, . ~ 0)) + y <- data[, labels] + + # better way? + if (formula[3] == ".()") { + x <- data[, colnames(data) != labels] + } + else { + columns <- all.vars(update(formula, 0 ~ .)) + x <- data[, columns] + } + + if (is.data.frame(x)) + x <- data.matrix(x) + + ret <- SVM.default(x, y, ...) + assign("call", call, ret) + return(ret) +} + +.createMultiClassSVM <- function(x, y, class.type, ...) { + force(x) # Force non-lazy evaluation of arguments. This is just for devtools testthat to work, it has some strange issues. 
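+  # For one.versus.one below, `pick` is built as a 2 x choose(k,2) matrix
+  # whose columns enumerate every unordered pair of class indexes; subproblem
+  # j then trains class pick[1,j] (labeled +1) against class pick[2,j] (-1).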
+ force(y) + call <- match.call(expand.dots = TRUE) + ys <- as.factor(y) + tys <- table(ys) + lev <- levels(ys) + pick <- rbind(c(1),c(2)) + if (class.type == 'one.versus.all') { + ymat <- matrix(-1, nrow = nrow(x), ncol = length(tys)) + ymat[cbind(seq(along = ys), sapply(ys, function(x) + which(x == lev)))] <- 1 + # Result: ymat - dummy matrix where ymat[, i] is matrix for problem i + } else if (class.type == 'one.versus.one') { + ## Classification: one against one + nclass <- length(tys) + m <- (nclass - 1) + minus <- nclass + 1 - sequence(m:1) + plus <- rep(1:m, m:1) + pick <- rbind(plus, minus) + xsplit <- split(data.frame(x), ys) + ymat <- list() + xlist <- list() + for (k in 1:ncol(pick)) { + ymat[[k]] <- + c(rep(1, nrow(xsplit[[pick[1, k]]])), rep(-1, nrow(xsplit[[pick[2, k]]]))) + xlist[[k]] <- + rbind(xsplit[[pick[1, k]]], xsplit[[pick[2, k]]]) + } + + + # Result: ymat[[i]] - classes for problem i + # Result: xlist[[i]] - dataset for problem i + }else{ + stop("Incorrect class.type") + } + # Get number of subproblems + if (is.matrix(ymat)) { + J <- 1:ncol(ymat) + }else if (is.list(ymat)) { + J <- 1:length(ymat) + } + + models <- list() + + # Fit one model after another + for (j in J) { + x.model <- NULL + y.model <- NULL + + if (class.type == "one.versus.all") { + x.model <- x + y.model <- ymat[,j] + }else if (class.type == "one.versus.one") { + # Note: it could be improved, but not so easily in R (all is copy) + x.model <- xlist[[j]] + y.model <- ymat[[j]] + } + + p <- as.list(match.call(expand.dots = TRUE)) + p$x <- x.model + p$y <- as.factor(y.model) + # class weights + w <- p$class.weights + if (!is.null(w) && class.type == "one.versus.one") { + weight.pos <- w[lev[pick[1][j]]] + weight.neg <- w[lev[pick[2][j]]] + p$class.weights <- c("1" = weight.pos, "-1" = weight.neg) + } + models[[j]] <- do.call(SVM, p[2:length(p)]) + assign("call", call, models[[j]]) + } + + core <- as.list(call)$core + kernel <- as.list(call)$kernel + prep <- as.list(call)$prep + if (is.null(core)) + core <- "libsvm" + if (is.null(kernel)) + kernel <- "linear" + if (is.null(prep)) + prep <- "none" + + obj <- list( + models = models, + class.type = class.type, + .X = x, # getter also private + .Y = y, # getter also private + .pick = pick, # getter also private + .levels = lev, + call = call, + core = core, + kernel = kernel, + preprocessing = prep + ) + + class(obj) <- "MultiClassSVM" + obj +} + +SVM.default <- + function(x, + y, + core = "libsvm", + kernel = "linear", + prep = "none", + transductive.learning = FALSE, + transductive.posratio = -1., + C = 1, + gamma = if (is.vector(x)) + 1 + else + 1 / ncol(x), + coef0 = 0, + degree = 3, + class.weights = NULL, + example.weights = NULL, + cache_size = 100, + tol = 1e-3, + max.iter = -1, + verbosity = 4, + class.type = 'one.versus.all', + svm.options = '', + ...) 
{ + force(x) + force(y) + + # First check if we have binary or multiclass case + if (!is.vector(y) && !is.factor(y)) { + stop("y is of a wrong class, please provide vector or factor") + } + + levels <- NULL + if (is.factor(y)) { + levels <- levels(y) + }else{ + # Standarizing, easier for library + y <- as.factor(y) + levels <- levels(y) + warning("It is recommended to pass y as factor") + } + + # We don't support transductive multiclass, because it is bazinga + if ((length(levels) > 2 && !transductive.learning)) { + params <- as.list(match.call(expand.dots = TRUE)) + #skipping first param which is function itself + params$class.weights <- class.weights + params$class.type <- class.type + return(do.call(.createMultiClassSVM, as.list(params[2:length(params)]))) + } + else if (length(levels) > 3 && + transductive.learning) { + # 3 or more classes + TR class + stop("Multiclass transductive learning is not supported!") + } + + if (core != "svmlight" && transductive.learning) + core <- "svmlight" + + call <- match.call(expand.dots = TRUE) + + # check for errors + if (core != "libsvm" && core != "svmlight") { + stop(sprintf("bad core: %s, available are: libsvm, svmlight", core)) + } + if (kernel != "linear" && + kernel != "poly" && kernel != "rbf" && kernel != "sigmoid") { + stop("bad kernel: %s") + } + if (prep != "2e" && + prep != "none") { + stop(sprintf("bad preprocess: %s", prep)) + } + if (verbosity < 0 || + verbosity > 6) { + stop("Wrong verbosity level, should be from 0 to 6") + } + if (C == 0 && + core == "libsvm") { + # libsvm does not handle C=0, svmlight does + warning("libsvm doesn't support C=0, switching to svmlight") + core <- "svmlight" + } + else if (C < 0) { + stop(sprintf("C parameter can't be negative: %f", C)) + } + + if (gamma <= 0) { + stop(sprintf("gamma parameter must be positive: %f", gamma)) + } + if (degree < 1 || + degree %% 1 != 0) { + stop(sprintf("degree parameter must be positive integer: %.2f", degree)) + } + + if (verbosity < 0 || + verbosity > 6) { + stop("Wrong verbosity level, should be from 0 to 6") + } + if ((transductive.posratio < 0 && + transductive.posratio != -1) || transductive.posratio > 1) { + stop("Please pass transductive.posratio in range [0,1]") + } + + # check data + if (nrow(x) != length(y)) { + stop("x and y have different lengths") + } + if (inherits(x, "Matrix")) { + x <- as(x, "matrix.csr") + } + else if (inherits(x, "simple_triplet_matrix")) { + ind <- order(data$i, data$j) + x <- new( + "matrix.csr", + ra = x$v[ind], + ja = x$j[ind], + ia = as.integer(cumsum(c( + 1, tabulate(x$i[ind]) + ))), + dimension = c(x$nrow, data$ncol) + ) + } + else if (inherits(x, "matrix.csr")) { + + } + else if (is.data.frame(x)) { + x <- data.matrix(x) + } + else if (!is.matrix(x)) { + stop( + "data is of a wrong class, please provide supported format: + matrix or data.frame for dense; + Matrix, simple_triplet_matrix or matrix.csr for sparse" + ) + } + + sparse <- inherits(x, "matrix.csr") + + if (sparse) { + if (is.null(y)) { + stop("Please provide label vector y for sparse matrix classification") + } + } + + # Binary classification or 2 classes + unlabeled (for transductive learning) + if ((length(levels) != 2 && !transductive.learning) || + (length(levels) != 3 && transductive.learning)) { + stop("Please pass correct (binary) number of classes or 3 for transductive") + } + + # Decide what label is used for unlabeled examples + unlabeled.level = "TR" + if (transductive.learning) { + if (!("TR" %in% levels || "0" %in% levels)) { + stop("Please include TR or 
0 factor in levels for transductive learning") + } + if ("TR" %in% levels && "0" %in% levels) { + stop("Couldn't deduce which label to use for transductive learning") + } + + if ("TR" %in% levels) { + unlabeled.level <- "TR" + }else{ + unlabeled.level <- "0" + } + } + # This ugly block of code ensures -1, 1 and 0 classes. + # Contribution to simplifying this are welcome :) + if (transductive.learning) { + # Erasing TR from levels. We will never return it + levels <- levels[levels != unlabeled.level] + indexes.unlabeled <- y == unlabeled.level + z <- y[!indexes.unlabeled] + z <- as.integer(factor(z, levels = levels)) + z[z == 1] = -1 + z[z == 2] = 1 + + y <- as.integer(y) + y[indexes.unlabeled] <- 0 + y[!indexes.unlabeled] <- z + }else{ + y <- as.integer(y) # Standarization, omits 0! + y[y == 1] <- -1 # Standarize it further! + y[y == 2] <- 1 + } + + config <- new(SVMConfiguration) + config$y <- data.matrix(y) + + config$use_transductive_learning <- transductive.learning + config$transductive_posratio <- transductive.posratio + + # sparse + if (sparse) { + config$sparse <- 1 + + #x@ia - rowptr + #x@ja - colind + #x@ra - values + config$set_sparse_data(x@ia, x@ja, x@ra, nrow(x), ncol(x), TRUE) + } + else { + config$sparse <- 0 + config$x <- x + } + + config$setLibrary(core) + config$setKernel(kernel) + config$setPreprocess(prep) + config$set_verbosity(verbosity) + + config$C <- C + config$gamma <- gamma + config$coef0 <- coef0 + config$degree <- degree + config$eps <- tol + config$cache_size <- cache_size + config$max_iter <- max.iter + config$svm_options <- svm.options + + if (!is.null(class.weights) && !is.logical(class.weights)) { + if (is.null(names(class.weights)) && class.weights != 'auto') { + stop("Please provide class.weights as named (by classes) list or vector or 'auto'") + } + + if (is.character(class.weights) && class.weights == "auto") { + # sklearns heuristic automatic class weighting + counts <- hist(y, breaks = 2, plot = FALSE)$counts + inv_freq <- 1 / counts + weights <- inv_freq / mean(inv_freq) + config$setClassWeights(weights) + } + else { + # Maps name -> index that is feed into SVM + # Note: factor is transformed such that class -> index in levels of factor + class.labels.indexes <- + sapply(names(class.weights), function(cls) { + which(levels == cls)[1] + }) + # Standarize for all libraries (so if passed list("2"=1, "1"=3) it is reversed) + class.weights <- class.weights[order(class.labels.indexes)] + # We always pass numeric, so it will work if it is the case + if (!is.numeric(y)) { + stop("[DEV] breaking change, please fix") + } + config$setClassWeights(as.numeric(class.weights)) + } + } + + if (!is.null(example.weights) && !is.logical(example.weights)) { + config$use_example_weights <- 1 + config$example_weights <- example.weights + } + + # default for now + shrinking = TRUE + probability = FALSE + + if (shrinking) { + config$shrinking <- 1 + } else { + config$shrinking <- 0 + } + + if (probability) { + config$probability <- 1 + } else { + config$probability <- 0 + } + + client <- new(SVMClient, config) + client$.train() + + # R object often have fields that don't change accessible through $ notation + assign("call", call, client) + assign(".levels", levels, client) + assign("core", client$.getCore(), client) + assign("kernel", client$.getKernel(), client) + assign("preprocessing", client$.getPreprocess(), client) + assign("degree", client$.getDegree(), client) + assign("gamma", client$.getGamma(), client) + assign("C", client$.getC(), client) + assign("alpha", 
client$.getAlpha(), client) + assign("bias", client$.getBias(), client) + if(client$kernel == "linear") { + assign("w", client$.getW(), client) + } + assign("SV", client$.getSV(), client) + assign("numberSV", client$.getNumberSV(), client) + assign("numberClasses", client$.getNumberClass(), client) + assign("iterations", client$.getIterations(), client) + assign("isShrinking", client$.isShrinking(), client) + assign("isProbability", client$.isProbability(), client) + assign("areExamplesWeighted", client$.areExamplesWeighted(), client) + assign("exampleWeights", client$.getExampleWeights(), client) + assign("classWeights", client$.getClassWeights(), client) + + assign(".staticFields", c("call", "core", "kernel", "preprocessing", "degree", "gamma", "C", "alpha", "bias", "w", "SV", + "numberSV", "numberClasses", "iterations", "isShrinking", "isProbability", "areExamplesWeighted", + "exampleWeights", "classWeights"), client) + + client + } + +print.Rcpp_SVMClient <- function(x, ...) { + summary(x) +} + +show.Rcpp_SVMClient <- function(object) { + summary(object) +} + +show.MultiClassSVM <- function(object) { + summary(object) +} + +setMethod("show", "Rcpp_SVMClient", show.Rcpp_SVMClient) +setMethod("show", "MultiClassSVM", show.MultiClassSVM) + +summary.MultiClassSVM <- function(object, ...) { + print( + sprintf( + "Support Vector Machine, multiclass.type: %s, core: %s, preprocess: %s", + object$class.type, + object$core, + object$kernel, + object$prep + ) + ) + print(sprintf("%d classes", + length(object$.levels))) + object <- object$models[[1]] + print(sprintf("Parameters: kernel: %s, C: %f", object$kernel, object$C)) + + if (object$kernel == "rbf") { + print(sprintf("Kernel parameters: gamma: %f", + object$gamma)) + } + else if (object$kernel == "poly") { + print( + sprintf( + "Kernel parameters: gamma: %f, degree: %d, coef0: %f", + object$gamma, + object$degree, + object$coef0 + ) + ) + } + else if (object$kernel == "sigmoid") { + print(sprintf( + "Kernel parameters: gamma: %f, coef0: %f", + object$gamma, + object$coef0 + )) + } +} + +print.MultiClassSVM <- function(x, ...) { + summary(x) +} + +summary.Rcpp_SVMClient <- function(object, ...) { + print( + sprintf( + "Support Vector Machine, core: %s, preprocess: %s", + object$core, + object$preprocessing + ) + ) + print(sprintf("Parameters: kernel: %s, C: %f", object$kernel, object$C)) + + if (object$kernel == "rbf") { + print(sprintf("Kernel parameters: gamma: %f", + object$gamma)) + } + else if (object$kernel == "poly") { + print( + sprintf( + "Kernel parameters: gamma: %f, degree: %d, coef0: %f", + object$gamma, + object$degree, + object$coef0 + ) + ) + } + else if (object$kernel == "sigmoid") { + print(sprintf( + "Kernel parameters: gamma: %f, coef0: %f", + object$gamma, + object$coef0 + )) + } + print( + sprintf( + "%d classes with %d support vectors", + object$numberClasses, + object$numberSV + ) + ) +} + +plot.MultiClassSVM <- function(x, ...) { + plot.Rcpp_SVMClient(x, ...) +} + +plot.Rcpp_SVMClient <- + function(x, X = NULL, mode = "normal", cols = c(1,2), radius = 3, radius.max = + 10, ...) { + #0. Some initial preparing + if (mode != "pca" && mode != "normal" && mode != "contour") { + stop("Wrong mode!") + } + if (class(x) == "MultiClassSVM") { + obj <- x$models[[1]] + }else{ + obj <- x + } + # NOTE: Added SparseM to dependencies + # TODO: Do we need e1071? + if (obj$.isSparse()) { + if (!requireNamespace("e1071", quietly = TRUE)) { + stop("For sparse support install e1071 package") + } + } + + #1. 
Get X and Y + if (is.null(X)) { + new_data <- FALSE + if (class(x) == "MultiClassSVM") { + X <- x$.X + true_target <- as.factor(x$.Y) + }else{ + true_target <- as.factor(x$.getY()) + if (obj$.isSparse()) { + X <- Matrix::t(obj$.getSparseX()) + }else{ + X <- obj$.getX() + } + } + t <- predict(x, X) + + }else{ + new_data <- TRUE + t <- predict(x, X) + true_target <- NULL + } + labels <- levels(as.factor(t)) + + #2. Do some checking + + if (ncol(X) > 2 && mode != "pca") { + warning( + "Only 2 dimension plotting is supported for multiclass. Plotting using cols parameter" + ) + } + if (ncol(X) > 2 && mode == "contour") { + stop("Contour mode is supported only for 2 dimensional data") + } + if (ncol(X) == 1) { + stop("Plotting is not supported for 1 dimensional data") + } + + #3. Prepare df. This is ugly copy so that we can do whatever we want + if (obj$.isSparse()) { + df <- data.frame(SparseM::as.matrix(X[,cols])) + }else{ + df <- data.frame(X[,cols]) + } + colnames(df) <- c("X1", "X2") # This is even worse + df['prediction'] <- as.factor(t) + + if (!new_data) { + if (length(levels(true_target)) > length(x$.levels)) { + levels(true_target) <- c(x$.levels, "0") + } + else { + levels(true_target) <- x$.levels + } + df['label'] <- true_target + } + + #4. Prepare data for plotting + if (obj$.areExamplesWeighted()) { + df['sizes'] <- obj$.getExampleWeights() + scale_size <- + scale_size_continuous(range = c(radius,radius.max)) + }else { + df['sizes'] <- radius + scale_size <- scale_size_identity() + } + + #5. Support parameters + kernel <- obj$.getKernel() + + + if (mode == "pca") { + mx <- colMeans(X) + pca_data <- prcomp(X, scale = FALSE) + # Transform data + df$X1 <- pca_data$x[,1] + df$X2 <- pca_data$x[,2] + } + + w <- NULL + if (kernel == "linear" && class(x) != "MultiClassSVM") { + # W will be used only for binary model + if (mode == "pca") { + w <- c(obj$.getW()) + w <- (w - mx) %*% pca_data$rotation + }else if (ncol(X) == 2) { + w <- c(obj$.getW()) + } + } + + points <- NULL + + #6. 
PLOT + if (ncol(X) == 2 && mode == "contour") { + x_col <- df$X1 + y_col <- df$X2 + + x_max <- max(x_col) + x_min <- min(x_col) + y_max <- max(y_col) + y_min <- min(y_col) + + x_margin <- (x_max - x_min) / 10 + y_margin <- (y_max - y_min) / 10 + + x_max <- x_max + x_margin + x_min <- x_min - x_margin + y_max <- y_max + y_margin + y_min <- y_min - y_margin + + x_axis <- seq(from = x_min, to = x_max, length.out = 300) + y_axis <- seq(from = y_min, to = y_max, length.out = 300) + grid <- data.frame(x_axis,y_axis) + grid <- expand.grid(x = x_axis,y = y_axis) + + prediction <- predict(x, grid) + grid['prediction'] <- prediction + + + + if (new_data) + points <- + geom_point(data = df, aes(X1, X2, size = sizes, colour = prediction)) + else + points <- + geom_point(data = df, aes( + X1, X2, size = sizes, colour = prediction, shape = label + )) + + pl <- ggplot() + + geom_tile(data = grid, aes( + x = x,y = y, fill = prediction, alpha = .5 + )) + + scale_fill_brewer(palette = "Set1") + + scale_alpha_identity() + + points + + scale_colour_brewer(palette = "Set1") + + scale_size + + }else{ + if (ncol(X) > 2 && mode != "pca") + warning("Only limited plotting is currently supported for multidimensional data") + + if (new_data) + points <- + geom_point(data = df, aes(X1, X2, size = sizes, colour = prediction)) + else + points <- + geom_point(data = df, aes( + X1, X2, size = sizes, colour = prediction, shape = label + )) + + pl <- ggplot() + + points + + scale_colour_brewer(palette = "Set1") + + scale_size + } + + # Add line + if (!is.null(w) && + ncol(X) && mode != "pca" && mode != "contour") { + s <- -w[1] / w[2] + int <- -obj$.getBias() / w[2] + pl <- pl + geom_abline(slope = s, intercept = int) + } + + plot(pl) + } + +predict.MultiClassSVM <- function(object, x, ...) { + # Sums votes + prediction.row.oao <- function(r) { + object$.levels[which.max(sapply(1:length(object$.levels), function(cl) { + sum(r == cl) + }))] + } + # Argmax of decision function + prediction.row.oaa <- function(r) { + object$.levels[which.max(r)] + } + ymat <- c() + for (i in 1:length(object$models)) { + model <- object$models[[i]] + + if (object$class.type == "one.versus.one") { + pick <- as.integer(object$.pick[,i]) + pick <- pick[c(2,1)] # Reverse order + # Predict + prediction <- predict(model, x) + + # Replace labels + votes <- pick[as.integer(prediction)] + ymat <- cbind(ymat, votes) + }else{ + # Predict + prediction <- predict(model, x, decision.function = TRUE) + ymat <- cbind(ymat, prediction) + } + } + if (object$class.type == "one.versus.one") { + ymat.preds <- apply(ymat, 1, prediction.row.oao) + }else if (object$class.type == "one.versus.all") { + ymat.preds <- apply(ymat, 1, prediction.row.oaa) + }else{ + stop("Unrecognized class.type") + } + return(factor(ymat.preds, levels = object$.levels)) +} + +predict.Rcpp_SVMClient <- + function(object, x_test, decision.function = FALSE, ...) 
{ + if (!is(x_test, "data.frame") && + !is(x_test, "matrix") && + !is(x_test,"numeric") && + !is(x_test,"matrix.csr")) { + stop("Wrong target class, please provide data.frame, matrix or numeric vector") + } + if (!object$.isSparse()) { + if (!is(x_test, "matrix") && + !is(x_test, "data.frame") && + !is.vector(x_test)) { + stop("Please provide matrix or data.frame") + } + if (!is(x_test, "matrix")) { + if (is.vector(x_test)) { + x_test <- t(as.matrix(x_test)) + } + else { + x_test <- data.matrix(x_test) + } + } + object$.predict(x_test) + } + else { + if (!is(x_test, "matrix.csr")) { + stop("Please provide sparse matrix") + } + object$.sparse_predict(x_test@ia, x_test@ja, x_test@ra, nrow(x_test), ncol(x_test)) + } + + if (decision.function) { + return(object$.getDecisionFunction()) + }else{ + prediction <- object$.getPrediction() + + if (any(prediction == 0) || + length(unique(prediction)) > length(object$.levels)) { + stop("Failed prediction, returned too many unique labels from library.") + } + + + if (!is.null(object$.levels)) { + # This line works because we do as.numeric() which transforms into 1 and 2 + # And we expect SVM to return same labels as passed + if (length(object$.levels) == 2) { + # Binary case + prediction <- + factor(object$.levels[(prediction + 1) / 2 + 1], levels = object$.levels) + }else{ + prediction <- + factor(object$.levels[prediction], levels = object$.levels) + } + + } + + prediction + } + } + +# Add (very basic) support for caret + +copy <- function(x) + x + +gmum.r.svm.radial.params = c("C", "gamma") +gmum.r.svm.radial.params.classes = c("double", "double") + +gmum.r.svm.linear.params = c("C") +gmum.r.svm.linear.params.classes = c("double") + +gmum.r.svm.poly.params = c("C", "gamma", "degree", "coef0") +gmum.r.svm.poly.params.classes = c("double", "double", "double", "double") + +caret.gmumSvmRadial <- list( + label = "gmum.r.svmRadial", + library = c("gmum.r"), + type = "Classification", + parameters = data.frame( + parameter = gmum.r.svm.radial.params, + class = gmum.r.svm.radial.params.classes, + label = gmum.r.svm.radial.params + ), + grid = function(x, y, len = NULL) { + # We pass tuning grid manually. + expand.grid(C = 10 ^ (-7:11), + gamma = 10 ^ (-10:10)) + }, + fit = function(x, y, wts, param, lev, last, classProbs, ...) { + ## First fti the pls model, generate the training set scores, + ## then attach what is needed to the random forest object to + ## be used late + x.df = as.data.frame(x) + x.df$y = as.numeric(y) + param$kernel = 'linear' + + if (is.null(param$gamma)) { + param$gamma = 1 + }else{ + param$kernel = 'rbf' + } + if (is.null(param$degree)) { + param$degree = 3 + }else{ + param$kernel = 'poly' + } + if (is.null(param$coef0)) { + param$coef0 = 0 + } + + + sv <- gmum.r::SVM( + x = x, + y = y, + C = param$C, + gamma = param$gamma, + degree = param$degree, + coef0 = param$coef0, + probability = classProbs, + kernel = param$kernel, + ... 
+ ) + + return(sv) + }, + predict = function(modelFit, newdata, submodels = NULL) { + as.factor(predict(modelFit, newdata)) + }, + prob = function(modelFit, newdata, submodels = NULL) { + predict(modelFit, newdata) + }, + varImp = NULL, + levels = function(x) { + levels(x$.getY()) + }, + sort = function(x) + x[order(x[,1]),] +) + +caret.gmumSvmLinear.loc <- copy(caret.gmumSvmRadial) +caret.gmumSvmPoly.loc <- copy(caret.gmumSvmRadial) + + +caret.gmumSvmLinear.loc$parameters <- + data.frame(parameter = gmum.r.svm.linear.params, + class = gmum.r.svm.linear.params.classes, + label = gmum.r.svm.linear.params) + +caret.gmumSvmLinear.loc$grid <- function(x, y, len = NULL) { + expand.grid(C = 10 ^ (-7:11)) +} + +caret.gmumSvmPoly.loc$grid <- function(x, y, len = NULL) { + expand.grid( + C = 10 ^ (-7:11), gamma = 10 ^ (-10:10), coef0 = c(0,1,10), degree = c(2,3,4) + ) +} + + +caret.gmumSvmPoly.loc$parameters <- + data.frame(parameter = gmum.r.svm.poly.params, + class = gmum.r.svm.poly.params.classes, + label = gmum.r.svm.poly.params) + +caret.gmumSvmPoly <- caret.gmumSvmPoly.loc +caret.gmumSvmLinear <- caret.gmumSvmLinear.loc diff --git a/R/svm.utils.R b/R/svm.utils.R new file mode 100644 index 00000000..d788ae8e --- /dev/null +++ b/R/svm.utils.R @@ -0,0 +1,168 @@ +svm.data.root <- system.file("data_sets", "svm", package="gmum.r") +svm.colon_cancer.path <- file.path(svm.data.root, "colon-cancer") + +svm.lib.libsvm <- "libsvm" +svm.lib.svmlight <- "svmlight" + +svm.prep.2e <- "2e" +svm.prep.none <- "none" + +svm.kernel.linear <- "linear" +svm.kernel.poly <- "poly" +svm.kernel.rbf <- "rbf" +svm.kernel.sigmoid <- "sigmoid" + +svm.plot.contour <- "contour" +svm.plot.pca <- "pca" + +read.libsvm = function( filename, dimensionality ) { + + content = readLines( filename ) + num_lines = length( content ) + yx = matrix( 0, num_lines, dimensionality + 1 ) + + # loop over lines + for ( i in 1:num_lines ) { + + # split by spaces + line = as.vector( strsplit( content[i], ' ' )[[1]]) + + # save label + yx[i,1] = as.numeric( line[[1]] ) + + # loop over values + for ( j in 2:length( line )) { + + # split by colon + index_value = strsplit( line[j], ':' )[[1]] + + index = as.numeric( index_value[1] ) + 1 # +1 because label goes first + value = as.numeric( index_value[2] ) + + yx[i, index] = value + } + } + + return( yx ) +} + +svm.dataset.colon_cancer <- function() { + bc <- read.libsvm(svm.colon_cancer.path, 2000) + return(bc) +} + +svm.dataset.circles <- function() { + matrix( + c(0,1,0,1,0,0,1,1,0,1,1,0), + ncol=3, + nrow=4, + dimnames=list(c(),c("x","y","t"))) +} + +#' @title Measure accuracy scoreof a prediction +#' +#' @description Calculates accuracy of a prediction, returns precent of correctly predicted examples +#' over all test examples. 
+#' @export svm.accuracy
+#' @rdname svm.accuracy
+#'
+#' @usage svm.accuracy(prediction, target)
+#'
+#' @param prediction factor or 1 dim vector with predicted classes
+#' @param target factor or 1 dim vector with true classes
+#'
+#' @examples
+#' \dontrun{
+#' # firstly, SVM model needs to be trained
+#' svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+#' # then we can use it to predict unknown samples
+#' p <- predict(svm, x_test)
+#' acc <- svm.accuracy(p, y)
+#' }
+svm.accuracy <- function(prediction, target) {
+  if ( length(target) != length(prediction)) {
+    stop("Prediction's and target's length don't match!")
+  }
+
+  diff = as.numeric(as.factor(target)) - as.numeric(as.factor(prediction))
+  acc <- sum(diff == 0) / length(target)
+  return(acc)
+}
+
+multiplot <- function(..., plotlist=NULL, file, cols=1, layout=NULL) {
+  # Make a list from the ... arguments and plotlist
+  plots <- c(list(...), plotlist)
+
+  numPlots = length(plots)
+
+  # If layout is NULL, then use 'cols' to determine layout
+  if (is.null(layout)) {
+    # Make the panel
+    # ncol: Number of columns of plots
+    # nrow: Number of rows needed, calculated from # of cols
+    layout <- matrix(seq(1, cols * ceiling(numPlots/cols)),
+                     ncol = cols, nrow = ceiling(numPlots/cols))
+  }
+
+  if (numPlots==1) {
+    print(plots[[1]])
+
+  } else {
+    # Set up the page
+    grid.newpage()
+    pushViewport(viewport(layout = grid.layout(nrow(layout), ncol(layout))))
+
+    # Make each plot, in the correct location
+    for (i in 1:numPlots) {
+      # Get the i,j matrix positions of the regions that contain this subplot
+      matchidx <- as.data.frame(which(layout == i, arr.ind = TRUE))
+
+      print(plots[[i]], vp = viewport(layout.pos.row = matchidx$row,
+                                      layout.pos.col = matchidx$col))
+    }
+  }
+}
+
+scale.data.frame <-
+  function(x, center = TRUE, scale = TRUE)
+  {
+    i <- sapply(x, is.numeric)
+    if (ncol(x[, i, drop = FALSE])) {
+      x[, i] <- tmp <- scale.default(x[, i, drop = FALSE], na.omit(center), na.omit(scale))
+      if(center || !is.logical(center))
+        attr(x, "scaled:center")[i] <- attr(tmp, "scaled:center")
+      if(scale || !is.logical(scale))
+        attr(x, "scaled:scale")[i] <- attr(tmp, "scaled:scale")
+    }
+    x
+  }
+
+read.matrix.csr <- function(file, fac = TRUE, ncol = NULL) {
+  l <- strsplit(readLines(file), "[ ]+")
+
+  ## extract y-values, if any
+  y <- if (is.na(l[[1]][1]) || length(grep(":",l[[1]][1])))
+    NULL
+  else
+    sapply(l, function(x) x[1])
+
+  ## x-values
+  rja <- do.call("rbind",
+                 lapply(l, function(x)
+                   do.call("rbind",
+                           strsplit(if (is.null(y)) x else x[-1], ":")
+                   )
+                 )
+  )
+  ja <- as.integer(rja[,1])
+  ia <- cumsum(c(1, sapply(l, length) - !is.null(y)))
+
+  max.ja <- max(ja)
+  dimension <- c(length(l), if (is.null(ncol)) max.ja else max(ncol, max.ja))
+  x = new(getClass("matrix.csr", where = asNamespace("SparseM")),
+          ra = as.numeric(rja[,2]), ja = ja,
+          ia = as.integer(ia), dimension = as.integer(dimension))
+  if (length(y))
+    list(x = x, y = if (fac) as.factor(y) else as.numeric(y))
+  else x
+}
diff --git a/README.md b/README.md
index 8f71fd6c..9b6c1a84 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,134 @@
-gmum.r
-======
-R package for our group's models.
+# gmum.R
+[![Build Status](https://travis-ci.org/gmum/gmum.r.svg?branch=dev)](https://travis-ci.org/gmum/gmum.r/)
 
-For more information and useful links please check wiki.
+gmum.R is a package consisting of various machine learning models. We focus on efficiency (underlying C++ implementation) and ease of use.
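+
+To try the development version, a hedged sketch of the standard devtools
+route (the repository name is taken from the build badge above):
+
+```R
+# install.packages("devtools")
+devtools::install_github("gmum/gmum.r")
+library(gmum.r)
+```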
+gmum.R is a close collaboration between GMUM group members (http://gmum.net) and students.
+
+The package includes three subpackages:
+
+* SVM with various underlying libraries and modifications
+* Cross Entropy Clustering (CEC) clustering algorithm
+* Growing Neural Gas clustering algorithm
+
+## Links
+
+* Documentation: [gmum.R.pdf](http://gmum.net/files/gmum.r/gmum.R.pdf)
+
+* Installation: [src/README.md](src/README.md)
+
+* Samples: [http://r.gmum.net/getting_started.html](http://r.gmum.net/getting_started.html)
+
+## SVM
+
+The SVM wrapper is the part of the gmum.R project that provides popular Support Vector Machine implementations wrapped in an R package.
+
+SVM Iris and sample weights examples
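+
+A sketch reproducing the sample-weights variant above (assumptions: the
+svmlight core, and a hypothetical per-example weight vector `weights`):
+
+```R
+library(gmum.r)
+
+data(iris)
+ir <- iris[iris$Species != "setosa", ]   # reduce to a binary problem
+ir$Species <- droplevels(ir$Species)
+weights <- runif(nrow(ir))               # hypothetical per-example weights
+svm.w <- SVM(Species ~ ., data = ir, core = "svmlight", kernel = "rbf",
+             C = 1, gamma = 0.5, example.weights = weights)
+plot(svm.w)   # weighted examples are drawn with varying radius
+```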
+
+
+### Example usage
+
+```R
+library('gmum.r')
+
+# Load a dataset, here we have provided an example
+data(svm_breast_cancer_dataset)
+ds <- svm.breastcancer.dataset
+
+# Create new SVM object
+svm <- SVM( formula = X1~. ,
+            data = ds,
+            core = "libsvm",
+            kernel = "linear",
+            prep = "none",
+            C = 10)
+
+x <- ds.X(svm)
+y <- ds.Y(svm)
+
+# Classify your dataset using the predict function
+prediction <- predict(svm, x)
+
+# Check the model's accuracy
+acc <- svm.accuracy(prediction=prediction, target=y)
+```
+
+## Growing Neural Gas
+
+A subpackage containing an **efficient**, **online** GNG algorithm. It produces a topological graph that you can easily convert to an igraph object, or you can
+dump your model to an optimized binary file and load it later.
+
+Clustering of the UCI wine dataset
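+
+The same model can also be trained online, on a stream of examples; the
+sketch below only uses functions exported by the package (timings are
+illustrative):
+
+```R
+library(gmum.r)
+
+gng <- GNG(train.online = TRUE, dim = 3, max.nodes = 100)
+insertExamples(gng, gng.preset.sphere(300))
+run(gng)       # training runs in a background thread
+Sys.sleep(2)   # let it consume the inserted examples
+pause(gng)
+meanError(gng)
+terminate(gng)
+```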
+
+### Example: cluster wine dataset
+
+In this example we construct a clustering of the UCI wine dataset using offline GNG.
+
+```R
+library(gmum.r)
+
+# Load data
+wine <- get.wine.dataset.X()
+
+# Train in an offline manner
+gng <- GNG(wine, labels=get.wine.dataset.y(), max.nodes=20)
+
+# Find the closest node to a vector of ones
+predict(gng, rep(1, ncol(wine)))
+
+# Find the mean error
+meanError(gng)
+
+# Plot with the first 2 coordinates as position
+plot(gng, vertex.color="cluster")
+```
+
+## Cross Entropy Clustering
+
+CEC aims to efficiently implement the Cross Entropy Clustering algorithm as an R extension.
+
+Cross-entropy clustering (CEC for short) combines the advantages of classical k-means with those of EM. Moreover, contrary to k-means and EM, CEC **finds the optimal number of clusters** by automatically removing redundant ones.
+
+CEC clustering
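+
+The pruning behaviour can be seen directly, ahead of the full walkthrough
+below (a sketch; we deliberately start with too many clusters):
+
+```R
+library(gmum.r)
+
+data(cec.mouse1.spherical)
+c <- CEC(k = 10, x = cec.mouse1.spherical)
+plot(c)   # redundant clusters should have been removed during fitting
+```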
+ + +### Example usage + +```R +library(gmum.r) + +data(cec.mouse1.spherical) +dataset = cec.mouse1.spherical + +# That is the dataset we want to cluster +plot(dataset) + +# Run cec with default parameters. Set the number of clusters and the dataset. +c <- CEC(k=3, x=dataset) +plot(c) + +# Since initial clusterization is random. It may be a good idea to run cec multiple times and choose the best result. +c <- CEC(k=3, x=dataset, control.nstart=10) +plot(c) + +# Better than before, however, we know that clusters are spherical; let's inform cec about that. +c <- CEC(k=3, x=dataset, control.nstart=10, method.type='spherical') +plot(c) + +# You can learn details of clustering like this +centers(c) +covMatrix(c) + +# You can predict cluster which a point would belong to: +predict(c, c(1,1)) + +# You can visualise size and shape of clusters +plot(c, ellipses=TRUE) + +# Try the same with random assignment +c <- CEC(k=3, x=dataset, control.nstart=10, method.type='spherical', method.init='random') +plot(c) +``` -For actual version remember to switch to the dev branch. diff --git a/cmake/FindR.cmake b/cmake/FindR.cmake new file mode 100644 index 00000000..f0d67636 --- /dev/null +++ b/cmake/FindR.cmake @@ -0,0 +1,141 @@ +# Defines the following: +# R_INCLUDE_DIR - Path to R include directories +# R_CXX_FLAGS - R library compiler flags (include directories) +# R_LD_FLAGS - R library linker flags +# R_LIBS - R libraries +# R_LIB_DIRS - R library link directories + +function(GetItemsWithNoPrefix resultVar prefix) + set(result) + foreach(ITR ${ARGN}) + if(NOT ITR MATCHES "^${prefix}.*") + list(APPEND result ${ITR}) + endif() + endforeach() + set(${resultVar} ${result} PARENT_SCOPE) +endfunction() + +function(GetItemsWithPrefix resultVar prefix) + set(result) + foreach(ITR ${ARGN}) + if(ITR MATCHES "^${prefix}.*") + list(APPEND result ${ITR}) + endif() + endforeach() + set(${resultVar} ${result} PARENT_SCOPE) +endfunction() + +function(RemovePrefix resultVar prefix) + set(result) + foreach(ITR ${ARGN}) + if(ITR MATCHES "^${prefix}.*") + STRING(REGEX REPLACE "^${prefix}" "" ITR ${ITR}) + endif() + list(APPEND result ${ITR}) + endforeach() + set(${resultVar} ${result} PARENT_SCOPE) +endfunction() + +set(TEMP_CMAKE_FIND_APPBUNDLE ${CMAKE_FIND_APPBUNDLE}) +set(CMAKE_FIND_APPBUNDLE "NEVER") +find_program(R_COMMAND R DOC "R executable.") +set(CMAKE_FIND_APPBUNDLE ${TEMP_CMAKE_FIND_APPBUNDLE}) + +if(R_COMMAND) + execute_process(WORKING_DIRECTORY . 
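+    # `R RHOME` prints the root directory of the R installation; later
+    # queries in this module are resolved relative to it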
+        COMMAND ${R_COMMAND} RHOME
+        OUTPUT_VARIABLE R_ROOT_DIR
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+    set(R_HOME ${R_ROOT_DIR} CACHE PATH "R home directory obtained from R RHOME")
+
+    if(R_HOME)
+        message("-- R_HOME found: ${R_HOME}")
+    else(R_HOME)
+        message(FATAL_ERROR "R_HOME not found")
+    endif(R_HOME)
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} "--slave" "--no-save" "-e" "cat(R.home('include'))"
+        OUTPUT_VARIABLE R_INCLUDE_DIR
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(R_INCLUDE_DIR)
+        message("-- R_INCLUDE_DIR found: ${R_INCLUDE_DIR}")
+    else(R_INCLUDE_DIR)
+        message(FATAL_ERROR "R_INCLUDE_DIR not found")
+    endif(R_INCLUDE_DIR)
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} CMD config --ldflags
+        OUTPUT_VARIABLE R_LD_FLAGS
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(NOT R_LD_FLAGS)
+        message(FATAL_ERROR "R_LD_FLAGS not found")
+    endif(NOT R_LD_FLAGS)
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} "CMD" "config" "LAPACK_LIBS"
+        OUTPUT_VARIABLE R_LIBRARY_LAPACK
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(R_LIBRARY_LAPACK)
+        message("-- R_LIBRARY_LAPACK found: ${R_LIBRARY_LAPACK}")
+    else(R_LIBRARY_LAPACK)
+        message(FATAL_ERROR "R_LIBRARY_LAPACK not found")
+    endif(R_LIBRARY_LAPACK)
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} "CMD" "config" "BLAS_LIBS"
+        OUTPUT_VARIABLE R_LIBRARY_BLAS
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(R_LIBRARY_BLAS)
+        message("-- R_LIBRARY_BLAS found: ${R_LIBRARY_BLAS}")
+    else(R_LIBRARY_BLAS)
+        message(FATAL_ERROR "R_LIBRARY_BLAS not found")
+    endif(R_LIBRARY_BLAS)
+
+    set(R_LD_FLAGS "${R_LD_FLAGS} ${R_LIBRARY_LAPACK} ${R_LIBRARY_BLAS}")
+    SEPARATE_ARGUMENTS(R_LD_FLAGS)
+
+    GetItemsWithPrefix(R_LIBS "-l" ${R_LD_FLAGS})
+    RemovePrefix(R_LIBS "-l" ${R_LIBS})
+    message(STATUS "R_LIBS: ${R_LIBS}")
+    GetItemsWithNoPrefix(R_LD_FLAGS "-l" ${R_LD_FLAGS})
+
+    GetItemsWithPrefix(R_LIB_DIRS "-L" ${R_LD_FLAGS})
+    RemovePrefix(R_LIB_DIRS "-L" ${R_LIB_DIRS})
+    message(STATUS "R_LIB_DIRS: ${R_LIB_DIRS}")
+    GetItemsWithNoPrefix(R_LD_FLAGS "-L" ${R_LD_FLAGS})
+
+    string(REPLACE ";" " " R_LD_FLAGS "${R_LD_FLAGS}")
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} --vanilla --slave -e "RcppArmadillo:::CxxFlags()"
+        OUTPUT_VARIABLE RCPPARMADILLO_CXX_FLAGS
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(RCPPARMADILLO_CXX_FLAGS)
+        message("-- RCPPARMADILLO_CXX_FLAGS found: ${RCPPARMADILLO_CXX_FLAGS}")
+    else(RCPPARMADILLO_CXX_FLAGS)
+        message(FATAL_ERROR "RCPPARMADILLO_CXX_FLAGS not found")
+    endif(RCPPARMADILLO_CXX_FLAGS)
+
+    EXECUTE_PROCESS(
+        COMMAND ${R_COMMAND} --vanilla --slave -e "Rcpp:::CxxFlags()"
+        OUTPUT_VARIABLE RCPP_CXX_FLAGS
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    if(RCPP_CXX_FLAGS)
+        message("-- RCPP_CXX_FLAGS found: ${RCPP_CXX_FLAGS}")
+    else(RCPP_CXX_FLAGS)
+        message(FATAL_ERROR "RCPP_CXX_FLAGS not found")
+    endif(RCPP_CXX_FLAGS)
+
+    message(STATUS "R_LD_FLAGS: ${R_LD_FLAGS}")
+    set(R_CXX_FLAGS "${RCPP_CXX_FLAGS} ${RCPPARMADILLO_CXX_FLAGS}" CACHE TYPE STRING)
+    message(STATUS "R_CXX_FLAGS: ${R_CXX_FLAGS}")
+else()
+    message(SEND_ERROR "FindR.cmake requires the following variables to be set: R_COMMAND")
+endif()
diff --git a/data/cec.ellipsegauss.RData b/data/cec.ellipsegauss.RData
new file mode 100644
index 00000000..337833af
Binary files /dev/null and b/data/cec.ellipsegauss.RData differ
diff --git a/data/cec.mouse1.RData b/data/cec.mouse1.RData
new file mode 100644
index 00000000..eba7ef9e
Binary files /dev/null and b/data/cec.mouse1.RData differ
diff --git a/data/cec.mouse1.classic.RData b/data/cec.mouse1.classic.RData
new file mode 100644
index 00000000..8ef0e7f9
Binary files /dev/null and b/data/cec.mouse1.classic.RData differ
diff --git a/data/cec.mouse1.spherical.RData b/data/cec.mouse1.spherical.RData
new file mode 100644
index 00000000..a5b84020
Binary files /dev/null and b/data/cec.mouse1.spherical.RData differ
diff --git a/data/cec.mouse2.spherical.RData b/data/cec.mouse2.spherical.RData
new file mode 100644
index 00000000..11891f56
Binary files /dev/null and b/data/cec.mouse2.spherical.RData differ
diff --git a/data/cec.simple1.RData b/data/cec.simple1.RData
new file mode 100644
index 00000000..d50ef1d3
Binary files /dev/null and b/data/cec.simple1.RData differ
diff --git a/data/cec.tset.RData b/data/cec.tset.RData
new file mode 100644
index 00000000..4310fbfc
Binary files /dev/null and b/data/cec.tset.RData differ
diff --git a/data/svm.transduction.RData b/data/svm.transduction.RData
new file mode 100644
index 00000000..41153433
Binary files /dev/null and b/data/svm.transduction.RData differ
diff --git a/data/svm_breast_cancer_dataset.RData b/data/svm_breast_cancer_dataset.RData
new file mode 100644
index 00000000..00f43979
Binary files /dev/null and b/data/svm_breast_cancer_dataset.RData differ
diff --git a/data/svm_two_circles_dataset.RData b/data/svm_two_circles_dataset.RData
new file mode 100644
index 00000000..5abfa2ea
Binary files /dev/null and b/data/svm_two_circles_dataset.RData differ
diff --git a/data/svm_two_ellipsoids_dataset.RData b/data/svm_two_ellipsoids_dataset.RData
new file mode 100644
index 00000000..4f366f03
Binary files /dev/null and b/data/svm_two_ellipsoids_dataset.RData differ
diff --git a/demo/00Index b/demo/00Index
new file mode 100644
index 00000000..ff06b932
--- /dev/null
+++ b/demo/00Index
@@ -0,0 +1,2 @@
+cec Simple CEC demo
+svm-demo Simple SVM demo
diff --git a/demo/cec.R b/demo/cec.R
new file mode 100644
index 00000000..545d19f4
--- /dev/null
+++ b/demo/cec.R
@@ -0,0 +1,33 @@
+library(gmum.r)
+
+data(cec.mouse1.spherical)
+dataset = cec.mouse1.spherical
+
+# That is the dataset we want to cluster:
+plot(dataset, main="Mouse-like dataset")
+
+# Run CEC with default parameters. Set the number of clusters and the dataset.
+c <- CEC(k=3, x=dataset)
+plot(c)
+
+# Since the initial clusterization is random, it may be a good idea to run CEC multiple times and choose the best result.
+c <- CEC(k=3, x=dataset, control.nstart=10)
+plot(c)
+
+# Better than before; however, we know that the clusters are spherical, so let's inform CEC about that.
+c <- CEC(k=3, x=dataset, control.nstart=10, method.type='spherical')
+plot(c)
+
+# Learn the details of the clustering:
+c$centers
+c$covMatrix
+
+# Predict the cluster a point would belong to:
+predict(c, c(1,1))
+
+# Visualise the size and shape of the clusters:
+plot(c, ellipses=TRUE)
+
+# Try the same with random assignment.
+c <- CEC(k=3, x=dataset, control.nstart=10, method.type='spherical', method.init='random')
+plot(c)
diff --git a/demo/extra/df.RDa b/demo/extra/df.RDa
new file mode 100644
index 00000000..5a4d914c
Binary files /dev/null and b/demo/extra/df.RDa differ
diff --git a/demo/extra/mnist.R b/demo/extra/mnist.R
new file mode 100644
index 00000000..5793d339
--- /dev/null
+++ b/demo/extra/mnist.R
@@ -0,0 +1,96 @@
+library(gmum.r)
+library(igraph)
+
+####################################################################
+#           Clustering MNIST dataset with GNG algorithm            #
+####################################################################
+
+
+### Helper functions ###
+load_mnist <- function() {
+  load_image_file <- function(filename) {
+    f = file(filename,'rb')
+    readBin(f,'integer',n=1,size=4,endian='big') # skip the magic number
+    n = readBin(f,'integer',n=1,size=4,endian='big')
+    nrow = readBin(f,'integer',n=1,size=4,endian='big')
+    ncol = readBin(f,'integer',n=1,size=4,endian='big')
+    x = readBin(f,'integer',n=n*nrow*ncol,size=1,signed=F)
+    close(f)
+    # one image per row, 28*28 = 784 columns
+    matrix(x, ncol=nrow*ncol, byrow=T)
+  }
+  load_label_file <- function(filename) {
+    f = file(filename,'rb')
+    readBin(f,'integer',n=1,size=4,endian='big') # skip the magic number
+    n = readBin(f,'integer',n=1,size=4,endian='big')
+    y = readBin(f,'integer',n=n,size=1,signed=F)
+    close(f)
+    y
+  }
+  train <- load_image_file('./data/train-images-idx3-ubyte')
+  test <- load_image_file('./data/t10k-images-idx3-ubyte')
+
+  train <- train/255.0
+  test <- test/255.0
+
+  data = list()
+  data$train = cbind(train, as.matrix(load_label_file('./data/train-labels-idx1-ubyte')))
+  data$test = cbind(test, as.matrix(load_label_file('./data/t10k-labels-idx1-ubyte')))
+
+  data
+}
+
+show_digit <- function(arr784, col=gray(12:1/12), ...) {
+  image(matrix(arr784, nrow=28, ncol=28)[,28:1], col=col, ...)
+}
+
+### Configure and load examples ###
+train.examples <- 10000
+max.nodes <- 100
+max.iter = 500
+data <- load_mnist()
+X = data$train[1:train.examples,-785]
+Y = data$train[1:train.examples,785]
+X.test = data$test[,-785]
+Y.test = data$test[,785]
+
+### Train Optimized GNG ###
+gng <- OptimizedGNG(max.nodes=max.nodes, x=X, value.range=c(0,1),
+                    labels=Y, max.iter=max.iter, min.improvement=1e-2)
+
+
+### Print some variables and save ###
+numberNodes(gng)
+meanError(gng)
+save.gng(gng, "mnist.trained.100.bin")
+
+### Plot using igraph layout and coloring from the extra vertex attribute ###
+plot(gng, mode=gng.plot.2d.errors,
+     vertex.color=gng.plot.color.label, layout=gng.plot.layout.igraph.fruchterman.fast)
+
+
+### Show the closest node to some examples ###
+id=200
+show_digit(X.test[id,])
+show_digit(node(gng, predict(gng, X.test[id,])+1)$pos)
+
+id=300
+show_digit(X.test[id,])
+show_digit(node(gng, predict(gng, X.test[id,])+1)$pos)
+
+id=400
+show_digit(X.test[id,])
+show_digit(node(gng, predict(gng, X.test[id,])+1)$pos)
+
+
+### Plot centroids ###
+centr <- centroids.gng(gng)
+centroids_pos = lapply(centr, function(x){ node(gng, x)$pos})
+par(mfrow=c(2,2))
+show_digit(node(gng, centr[1])$pos)
+show_digit(node(gng, centr[2])$pos)
+show_digit(node(gng, centr[3])$pos)
+show_digit(node(gng, centr[4])$pos)
diff --git a/demo/extra/tfml_demo.R b/demo/extra/tfml_demo.R
new file mode 100644
index 00000000..46de1522
--- /dev/null
+++ b/demo/extra/tfml_demo.R
@@ -0,0 +1,137 @@
+library(gmum.r)
+library(ggplot2)
+
+# --- CEC
+
+# Load a data set
+data(cec.mouse1.spherical)
+mouse <- cec.mouse1.spherical
+par(mfrow=c(1,1))
+
+# See what we are dealing with
+plot(mouse, main="Mouse dataset")
+
+# Run CEC on it
+c <- CEC(k=3, x=mouse)
+plot(c)
+title(main="CEC on mouse")
+
+# Since the initial clusterization is random,
+# it may be a good idea to run CEC multiple times
+# and choose the best result.
+c <- CEC(k=3, x=mouse, control.nstart=10)
+plot(c)
+title(main="Best result from 10 CEC runs")
+
+# Better than before; however, we know that the clusters
+# are spherical, so let's inform CEC about that.
+c <- CEC(k=3, x=mouse, method.type='spherical')
+plot(c)
+title(main="CEC with predestined cluster type")
+
+# You can inspect the details of the clustering like this
+centers(c)
+covMatrix(c)
+
+# You can visualise the size and shape of the clusters
+plot(c, ellipses=TRUE)
+title(main="CEC cluster shapes")
+
+# Unfair k-means comparison
+library(stats)
+cec <- CEC(k=3, x=mouse, control.nstart=10)
+km <- kmeans(centers=3, x=mouse, nstart=10)
+
+par(mfrow=c(1,2))
+plot(mouse, col=km$cluster)
+title(main="k-means")
+plot(cec)
+title(main="CEC")
+
+# --- SVM
+
+# Data sets
+data(svm_two_circles_dataset)
+data(svm_two_ellipsoids_dataset)
+
+# Let's run 2 SVMs on the first data set,
+# one normal, the other with 2e preprocessing
+svm <- SVM(V3~.,
+           svm.twoellipsoids.dataset,
+           verbosity = 0);
+
+esvm <- SVM(V3~.,
+            svm.twoellipsoids.dataset,
+            prep="2e",
+            verbosity = 0);
+
+# Plot the results
+p1 <- plot(svm) +
+  scale_x_continuous(limits=c(-10, 10)) +
+  scale_y_continuous(limits=c(-10, 10)) +
+  ggtitle("SVM on two ellipsoids")
+p2 <- plot(esvm) +
+  scale_x_continuous(limits=c(-10, 10)) +
+  scale_y_continuous(limits=c(-10, 10)) +
+  ggtitle("2eSVM on two ellipsoids")
+multiplot(p1,p2)
+
+# Let's try the same thing on the other data set
+svm <- SVM(V3~.,
+           svm.twocircles.dataset,
+           verbosity = 0);
+esvm <- SVM(V3~.,
+            svm.twocircles.dataset,
+            prep = "2e",
+            verbosity = 0);
+
+# Plot the results
+p1 <- plot(svm) +
+  scale_x_continuous(limits=c(-5, 10)) +
+  scale_y_continuous(limits=c(-5, 5)) +
+  ggtitle("SVM on two circles")
+p2 <- plot(esvm) +
+  scale_x_continuous(limits=c(-5, 10)) +
+  scale_y_continuous(limits=c(-5, 5)) +
+  ggtitle("2eSVM on two circles")
+multiplot(p1,p2)
+
+# ---
+
+# Let's say we changed our mind
+svm <- SVM(V3~.,
+           svm.twocircles.dataset,
+           lib="svmlight",
+           verbosity=0);
+plot(svm)
+svm$call
+
+# ---
+
+# Load some data and weights
+load("demo/extra/df.RDa")
+load("demo/extra/w.RDa")
+formula <- Y~.
+const_weights <- rep(1,nrow(df))
+
+# Fit two SVMs, one with weighted examples
+svm <- SVM(formula,
+           df,
+           lib = "svmlight",
+           kernel = "rbf",
+           C = 10,
+           gamma = 30,
+           verbosity = 0);
+
+wsvm <- SVM(formula,
+            df,
+            lib = "svmlight",
+            kernel = "rbf",
+            example.weights = weights,
+            C = 10,
+            gamma = 30,
+            verbosity = 0);
+
+# Plot
+p1 <- plot(svm)
+p2 <- plot(wsvm)
+
diff --git a/demo/extra/w.RDa b/demo/extra/w.RDa
new file mode 100644
index 00000000..c3d78d71
Binary files /dev/null and b/demo/extra/w.RDa differ
diff --git a/demo/samples/README.md b/demo/samples/README.md
new file mode 100644
index 00000000..38665896
--- /dev/null
+++ b/demo/samples/README.md
@@ -0,0 +1,5 @@
+To run the demo compilation, install pandoc (from its website; for instance, they provide a .deb for Debian) and
+then install http://cran.r-project.org/web/packages/pander/index.html - this might require manually downloading
+the tar.gz and running `R CMD INSTALL `
+
+Then you can compile by running `R --vanilla < compile.R`
diff --git a/demo/samples/cec.T.dataset.R b/demo/samples/cec.T.dataset.R
new file mode 100644
index 00000000..33669a86
--- /dev/null
+++ b/demo/samples/cec.T.dataset.R
@@ -0,0 +1,26 @@
+#' ---
+#' title: "Pick types of clusters in CEC"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# Load our library
+library(gmum.r)
+
+# Load the T-like data set
+data(cec.tset)
+
+# We will be using 3 types of clusters
+standard_cluster_param = list(method.type = "standard")
+spherical_cluster_param = list(method.type = "spherical")
+diagonal_cluster_param = list(method.type = "diagonal")
+
+# Run the CEC algorithm 100 times on the T-like dataset. We start with one standard, one spherical and 2 diagonal clusters.
+c <- CEC(x = Tset, params.mix = list(standard_cluster_param, spherical_cluster_param, diagonal_cluster_param, diagonal_cluster_param), control.nstart = 100, control.eps=0.09)
+
+# Plot the centroids with ellipses and centers
+plot(c, ellipses = TRUE, centers = TRUE)
+
diff --git a/demo/samples/cec.basic.R b/demo/samples/cec.basic.R
new file mode 100644
index 00000000..7cf88a1d
--- /dev/null
+++ b/demo/samples/cec.basic.R
@@ -0,0 +1,27 @@
+#' ---
+#' title: "Basic clustering of mouse dataset using CEC"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# Load our library
+library(gmum.r)
+
+# Fitting to a mouse-like data set
+data(cec.mouse1.spherical)
+
+# Look at the Mouse-like data set
+plot(cec.mouse1.spherical, main="Mouse-like data set")
+
+# Fit spherical gaussians to the data set
+cec <- CEC(k=3, x=cec.mouse1.spherical, control.nstart=10, method.type="spherical")
+
+# Plot results
+plot(cec, centers=TRUE, ellipses=TRUE)
+
+# Predict which cluster the points (1,1) and (2,2) would belong to:
+predict(cec, c(1,1))
+predict(cec, c(2,2))
diff --git a/demo/samples/cec.ellipse.R b/demo/samples/cec.ellipse.R
new file mode 100644
index 00000000..a0d257f8
--- /dev/null
+++ b/demo/samples/cec.ellipse.R
@@ -0,0 +1,23 @@
+#' ---
+#' title: "Fit any gauss family to dataset in CEC"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# Load our library
+library(gmum.r)
+
+# Load the ellipse_gauss data set
+data(cec.ellipsegauss)
+
+# Try to fit 5 gaussians to the data set
+cec <- CEC(k=5, x=cec.ellipsegauss, method.init="random")
+
+# Note that we set 5 clusters; only 4 were needed.
+plot(cec, centers=TRUE, ellipses=TRUE)
+
+# Show information about the clustering
+summary(cec)
diff --git a/demo/samples/cec.iris.R b/demo/samples/cec.iris.R
new file mode 100644
index 00000000..ab8b31eb
--- /dev/null
+++ b/demo/samples/cec.iris.R
@@ -0,0 +1,26 @@
+#' ---
+#' title: "PCA applied to the iris data set clustered with CEC"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# Load the iris data set (dimension = 4)
+dataset_points <- iris
+
+# Load our library
+library(gmum.r)
+
+# Try to fit 3 spherical gaussians to the data set
+cec <- CEC(x=dataset_points, k=3, method.type = "spherical")
+
+# Scatterplot of the data set clustered with CEC
+plot(cec, slice=c(1,2,3,4))
+
+# Apply PCA in plotting:
+plot(cec, pca=TRUE)
+
+# Show info about the clustering
+summary(cec)
diff --git a/demo/samples/compile.R b/demo/samples/compile.R
new file mode 100644
index 00000000..6347627d
--- /dev/null
+++ b/demo/samples/compile.R
@@ -0,0 +1,53 @@
+library(rmarkdown)
+
+# Note: rmarkdown has some bugs, so I cannot use a loop here, and also
+# sometimes you have to terminate R and restart to compile successfully ;)
+
+highlighting <- "zenburn"
+options(warn=-1)
+render("svm.2e.R", output_options=c(highlight=highlighting))
+render("svm.basic.R", output_options=c(highlight=highlighting))
+render("svm.caret.R", output_options=c(highlight=highlighting))
+render("svm.multiclass.R", output_options=c(highlight=highlighting))
+render("svm.transductive.R", output_options=c(highlight=highlighting))
+render("svm.example.weights.R", output_options=c(highlight=highlighting))
+render("svm.news20.times.R", output_options=c(highlight=highlighting))
+
+render("gng.online.R", output_options=c(highlight=highlighting))
+render("gng.wine.R", output_options=c(highlight=highlighting))
+render("gng.optimized.R", output_options=c(highlight=highlighting))
+render("gng.mouse.R", output_options=c(highlight=highlighting))
+
+
+render("cec.basic.R", output_options=c(highlight=highlighting))
+render("cec.ellipse.R", output_options=c(highlight=highlighting))
+render("cec.iris.R", output_options=c(highlight=highlighting))
+render("cec.T.dataset.R", output_options=c(highlight=highlighting))
+
+# THIS IS REALLY TRICKY - we generate non-self-contained files
+# and then link them to the gmum.r FTP server
+
+scripts <- list.files(".", pattern = glob2rx("*.R"), full.names = FALSE)
+# NOTE: Run once for each script online!
+
+# To run for only a subset, replace `scripts` with the list of files you wish to update
+run.cmds <- function(script.name){
+  file.folder <- paste(substr(script.name, 1, nchar(script.name)-2), "_files", sep="")
+  replace_command <-
+    paste("sed -i 's/",file.folder,
+          "/http:\\/\\/gmum.net\\/files\\/gmum.r\\/sample\\/",file.folder,"/g' *.html", sep="")
+  system(replace_command)
+  rem_command <- paste("rm -r -f ", file.folder, "/boot*", sep="")
+  system(rem_command)
+  rem_command <- paste("rm -r -f ", file.folder, "/jquery*", sep="")
+  system(rem_command)
+}
+
+system("sed -i '/.*boot.*/d' *.html")
+system("sed -i '/.*jquery.*/d' *.html")
+
+for(script in scripts){
+  run.cmds(script)
+}
+
+
diff --git a/demo/samples/gng.mouse.R b/demo/samples/gng.mouse.R
new file mode 100644
index 00000000..8d14680c
--- /dev/null
+++ b/demo/samples/gng.mouse.R
@@ -0,0 +1,53 @@
+#' ---
+#' title: "Predicting closest centroid in GNG"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+library(gmum.r)
+library(ggplot2)
+
+# Load our library dataset
+data(cec.mouse1.spherical)
+
+# Train a GNG model and find the centroids of the resulting graph
+g <- GNG(cec.mouse1.spherical, max.nodes=40)
+
+# Convert to igraph and plot the resulting graph
+plot(convertToIGraph(g))
+
+# GNG aims at making it easy to work with its graph.
+# Here we will predict the closest centroid
+mouse.centr <- calculateCentroids(g)
+
+# Now we can plot the results to see the decision boundary for assigning a node to a centroid
+m = as.data.frame(cec.mouse1.spherical)
+colnames(m) = c("x", "y")
+
+x_col <- cec.mouse1.spherical[,1]
+y_col <- cec.mouse1.spherical[,2]
+
+x_max <- max(x_col)
+x_min <- min(x_col)
+y_max <- max(y_col)
+y_min <- min(y_col)
+
+x_axis <- seq(from=x_min, to=x_max, length.out=30)
+y_axis <- seq(from=y_min, to=y_max, length.out=30)
+grid <- expand.grid(x=x_axis,y=y_axis)
+
+
+
+target <- findClosests(g, node.ids=mouse.centr, x=grid)
+grid["target"] <- target
+
+
+pl <- ggplot()+
+  geom_tile(data=grid, aes(x=x,y=y, fill=factor(target))) + theme(legend.position="none") +
+  geom_point(data=m, aes(x,y), color='white') + scale_size_continuous(range = c(3, 6))
+plot(pl)
+
diff --git a/demo/samples/gng.online.R b/demo/samples/gng.online.R
new file mode 100644
index 00000000..e296a11a
--- /dev/null
+++ b/demo/samples/gng.online.R
@@ -0,0 +1,43 @@
+#' ---
+#' title: "Online training in GNG"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+library(gmum.r)
+
+# If you decide to use this advanced feature, pass the dimensionality of the data
+gng <- GNG(max.nodes=100, train.online=TRUE, dim=3, verbosity=3)
+
+# Construct an exemplary spherical dataset with labels
+ex <- gng.preset.sphere(N=10000)
+labels <- round(runif(10000)*3)
+
+# Insert the dataset with labels
+insertExamples(gng, ex, labels)
+
+
+# Run the algorithm in parallel
+# (GNG will be running in a separate thread!)
+run(gng)
+
+# Wait for it to converge
+Sys.sleep(5.0)
+
+# It is easy to plot the dataset with an error curve!
+plot(gng, mode="2d.errors", vertex.size=6)
+
+# We can add another dataset
+cube.dataset <- gng.preset.cube(N=10000, r=1.0, center=c(1.0,1.0,1.0))
+insertExamples(gng, cube.dataset)
+
+# Wait for it to converge
+Sys.sleep(5.0)
+
+# Plot again.
+plot(gng, mode="2d.errors", vertex.size=6)
+
+# Terminate the algorithm
+terminate(gng)
diff --git a/demo/samples/gng.optimized.R b/demo/samples/gng.optimized.R
new file mode 100644
index 00000000..dad8a992
--- /dev/null
+++ b/demo/samples/gng.optimized.R
@@ -0,0 +1,32 @@
+#' ---
+#' title: "Optimized version of GNG"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# For advanced use you might want to consider OptimizedGNG,
+# which is an asymptotically much faster implementation that does not
+# sacrifice the quality of the graph
+
+# GNG's unique capability is online training
+
+library(gmum.r)
+
+# Construct a spherical dataset with 10^4 points. Might take a while!
+sphere.dataset <- gng.preset.sphere(N=10000)
+
+# If you decide to use this advanced feature, you have to pass a
+# range (bounding box) in which all of the passed data feature values will reside
+gng <- OptimizedGNG(sphere.dataset, max.nodes=1000, max.iter=10000, dim=3,
+                    value.range=c(0,1))
+
+# GNG would take much longer; you can check it yourself
+# gng <- GNG(sphere.dataset, max.nodes=1000, max.iter=10000, dim=3)
+
+findClosests(gng, calculateCentroids(gng), sphere.dataset)
+
+# Plot the results using spatial coordinates
+plot(gng, layout=gng.plot.layout.v2d, vertex.size=6)
diff --git a/demo/samples/gng.wine.R b/demo/samples/gng.wine.R
new file mode 100644
index 00000000..f25713f3
--- /dev/null
+++ b/demo/samples/gng.wine.R
@@ -0,0 +1,47 @@
+#' ---
+#' title: "GNG igraph integration on wine dataset"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+#'
+library(gmum.r)
+library(caret) # For confusionMatrix
+
+wine <- get.wine.dataset.X(scale=TRUE)
+
+# Train in an offline manner
+gng <- GNG(wine, labels=get.wine.dataset.y(), max.nodes=20,
+           max.iter=10000, min.improvement=1e-1)
+
+# Print the number of nodes
+numberNodes(gng)
+
+# Convert to igraph directly!
+ig <- convertToIGraph(gng)
+
+# Print the mean degree of the network
+mean(degree(ig))
+
+# You can access different attributes of the nodes
+V(ig)$error[1]
+
+# Plot using an igraph layout
+plot(gng, vertex.color="label", layout=igraph::layout.fruchterman.reingold,
+     vertex.size=9)
+
+# Print a summary of the trained object
+summary(gng)
+
+# You can use the graph to predict new samples
+# (in a closest-neighbour way)
+preds <- c()
+for(i in 1:nrow(wine)){
+  preds <- c(preds,round(node(gng, predict(gng, wine[i,]))$label))
+}
+
+# Print prediction statistics
+confusionMatrix(table(preds, get.wine.dataset.y()))
+
diff --git a/demo/samples/svm.2e.R b/demo/samples/svm.2e.R
new file mode 100644
index 00000000..3c8e1406
--- /dev/null
+++ b/demo/samples/svm.2e.R
@@ -0,0 +1,78 @@
+#' ---
+#' title: "Two Ellipsoid SVM preprocessing technique"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+# For the linear kernel gmum.R provides a new preprocessing technique
+# called Two Ellipsoid SVM, which accounts for the different covariances
+# of the classes
+library(gmum.r)
+library(caret)
+data(svm_breast_cancer_dataset)
+ds <- svm.breastcancer.dataset
+
+# We can trigger 2e by passing the prep parameter. It works with either of the libraries
+svm.2e <- SVM(x=ds[,-1], y=ds[,1], core="libsvm", kernel="linear", prep = "2e", C=10);
+acc.2e <- svm.accuracy(prediction=predict(svm.2e, ds[,-1]), target=ds[,1])
+
+
+svm <- SVM(x=ds[,-1], y=ds[,1], core="libsvm", kernel="linear", C=10);
+acc <- svm.accuracy(prediction=predict(svm, ds[,-1]), target=ds[,1])
+
+# Seems that 2e preprocessing helps!
+
+print(paste("LIBSVM + linear kernel accuracy: ", round(acc,3), sep=""))
+print(paste("LIBSVM + linear kernel + 2e accuracy: ", round(acc.2e,3), sep=""))
+
+plot(svm.2e, mode="pca")
+plot(svm, mode="pca")
+
+# We can also perform rigorous CV on the breast cancer dataset
+
+ds$X1 <- as.factor(ds$X1) # Caret requires passing a factor for classification :)
+inTraining <- createDataPartition(ds$X1, p = .75, list = FALSE)
+training <- ds[ inTraining,]
+testing <- ds[-inTraining,]
+
+fitControl <- trainControl(method = "cv",
+                           ## 8-fold CV...
+                           number = 8,
+                           repeats = 1,
+                           verboseIter=FALSE
+)
+
+# First fit a standard linear SVM using LIBSVM
+fit.results <- train(X1 ~ ., data = training,
+                     method = caret.gmumSvmLinear,
+                     preProc = c("center", "scale"),
+                     tuneLength = 8,
+                     seed = 777,
+                     trControl = fitControl,
+                     tuneGrid = expand.grid(C=10^(c(-3:3))),
+                     core = "libsvm", # gmum.R parameter - pick library
+                     verbosity = 0 # no outputs
+)
+
+# Now use 2e preprocessing
+fit.results.2e <- train(X1 ~ ., data = training,
+                        method = caret.gmumSvmLinear,
+                        preProc = c("center", "scale"),
+                        tuneLength = 8,
+                        trControl = fitControl,
+                        seed = 777,
+                        tuneGrid = expand.grid(C=10^(c(-3:3))),
+                        core = "libsvm", # gmum.R parameter - pick library
+                        verbosity = 0, # no outputs
+                        prep = "2e" # Use 2e preprocessing
+)
+
+acc <- svm.accuracy(predict(fit.results$finalModel, testing[,-1]), testing[,1])
+acc.2e <- svm.accuracy(predict(fit.results.2e$finalModel, testing[,-1]), testing[,1])
+
+print(paste("LIBSVM + linear kernel 8-fold CV accuracy: ", round(acc,3), sep=""))
+print(paste("LIBSVM + linear kernel + 2e 8-fold CV accuracy: ", round(acc.2e,3), sep=""))
+
diff --git a/demo/samples/svm.basic.R b/demo/samples/svm.basic.R
new file mode 100644
index 00000000..16d3b403
--- /dev/null
+++ b/demo/samples/svm.basic.R
@@ -0,0 +1,21 @@
+#' ---
+#' title: "Basic classification on breast cancer dataset"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+library(gmum.r)
+
+# We will perform basic classification on the breast cancer dataset
+# using LIBSVM with a linear kernel
+data(svm_breast_cancer_dataset)
+
+# We can pass either a formula or X and Y explicitly
+svm <- SVM(X1 ~ ., svm.breastcancer.dataset, core="libsvm", kernel="linear", C=10)
+
+pred <- predict(svm, svm.breastcancer.dataset[,-1])
+
+plot(svm, mode="pca")
\ No newline at end of file
diff --git a/demo/samples/svm.caret.R b/demo/samples/svm.caret.R
new file mode 100644
index 00000000..8e93fc27
--- /dev/null
+++ b/demo/samples/svm.caret.R
@@ -0,0 +1,52 @@
+#' ---
+#' title: "Caret support in gmum.R"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+
+# We support caret, so that you can easily construct
+# complex experiments using gmum.R
+library(gmum.r)
+library(caret)
+library(mlbench) # For the Sonar dataset
+set.seed(777)
+
+# The task is to discriminate between sonar signals bounced off
+# a metal cylinder and those bounced off a roughly cylindrical rock
+data(Sonar)
+
+inTraining <- createDataPartition(Sonar$Class, p = .75, list = FALSE)
+training <- Sonar[ inTraining,]
+testing <- Sonar[-inTraining,]
+
+fitControl <- trainControl(method = "cv",
+                           ## 10-fold CV...
+                           number = 10,
+                           ## repeated once
+                           repeats = 1,
+                           verboseIter=FALSE
+)
+
+
+# Caret support is provided by a set of caret methods
+# You can choose between: caret.gmumSvmRadial, caret.gmumSvmLinear
+# and caret.gmumSvmPoly
+model <- train(Class ~ ., data = training,
+               method = caret.gmumSvmRadial,
+               preProc = c("center", "scale"),
+               tuneLength = 8,
+               trControl = fitControl,
+               tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))),
+               core = "libsvm", # gmum.R parameter - pick library
+               verbosity = 0 # no outputs
+               )
+
+# Print the 10-fold CV experiment results
+print(model)
+
+# Seems that gamma=0.01 and C=100 was the best choice
+plot(model)
diff --git a/demo/samples/svm.example.weights.R b/demo/samples/svm.example.weights.R
new file mode 100644
index 00000000..11bd0286
--- /dev/null
+++ b/demo/samples/svm.example.weights.R
@@ -0,0 +1,29 @@
+#' ---
+#' title: "Example weight support in gmum.R"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+library(gmum.r)
+library(ggplot2)
+library(SparseM)
+library(Matrix)
+
+# Initialize exemplary data
+x <- cbind(c(2.76405235, 1.97873798, 2.86755799, 1.95008842, 0.89678115, 1.14404357, 1.76103773, 1.44386323, 2.49407907, 1.3130677, -2.55298982, 0.8644362, 2.26975462, 0.04575852, 1.53277921, 0.15494743, -0.88778575, -0.34791215, 1.23029068, -0.38732682), c(1.40015721, 3.2408932, 0.02272212, 0.84864279, 1.4105985, 2.45427351, 1.12167502, 1.33367433, 0.79484174, 0.14590426, 0.6536186, -0.74216502, -1.45436567, -0.18718385, 1.46935877, 0.37816252, -1.98079647, 0.15634897, 1.20237985, -0.30230275))
+y <- c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1)
+weights <- c(1.04855297, 1.42001794, 1.70627019, 1.9507754, 0.50965218, 0.4380743, 1.25279536, 0.77749036, 1.61389785, 3.1911042, 0.89546656, 0.3869025, 0.51080514, 1.18063218, 0.02818223, 2.14165935, 0.33258611, 1.51235949, 3.17161047, 1.81370583)
+
+
+df <- data.frame(x, y)
+
+svm.rbf <- SVM(formula=y~., data=df, core="libsvm", kernel="rbf", C=1.0, gamma=0.5)
+weighted.svm.rbf <- SVM(formula=y~., data=df, core="svmlight", kernel="rbf", C=1.0,
+                        gamma=0.5, example.weights=weights)
+
+# Plot to see how the decision boundary is affected
+plot(svm.rbf, mode="contour")
+plot(weighted.svm.rbf, mode="contour", radius.max=10)
diff --git a/demo/samples/svm.multiclass.R b/demo/samples/svm.multiclass.R
new file mode 100644
index 00000000..ba32ab6c
--- /dev/null
+++ b/demo/samples/svm.multiclass.R
@@ -0,0 +1,30 @@
+#' ---
+#' title: "OVA and OVO multiclass classification"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+
+# Use SVM to run multiclass prediction (OVA or OVO)
+library(gmum.r)
+
+data(iris)
+
+# One-versus-all solves K subproblems
+sv.ova <- SVM(Species ~ ., data=iris, class.type="one.versus.all", verbosity=0)
+preds <- predict(sv.ova, iris[,1:4])
+acc.ova <- sum(diag(table(preds, iris$Species)))/sum(table(preds, iris$Species))
+
+# One-versus-one solves K(K-1)/2 subproblems (one for each pair of classes)
+sv.ovo <- SVM(x=iris[,1:4], y=iris[,5], class.type="one.versus.one", verbosity=0)
+preds <- predict(sv.ovo, iris[,1:4])
+acc.ovo <- sum(diag(table(preds, iris$Species)))/sum(table(preds, iris$Species))
+
+plot(sv.ova)
+# The OVO scheme gets 2 more examples from
+# the training set classified correctly
+plot(sv.ovo)
+
diff --git a/demo/samples/svm.news20.times.R b/demo/samples/svm.news20.times.R
new file mode 100644
index 00000000..c15f9897
--- /dev/null
+++ b/demo/samples/svm.news20.times.R
@@ -0,0 +1,103 @@
+#' ---
+#' title: "Comparing times of gmum.r SVM and e1071 on large data"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+#'
+library(gmum.r)
+library(caret)
+library(e1071)
+library(R.utils)
+
+# download the news20 dataset
+download.file("http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/news20.binary.bz2", destfile="news20.binary.bz2")
+bunzip2("news20.binary.bz2")
+news20 <- read.matrix.csr("news20.binary")
+file.remove("news20.binary")
+news20.part <- list(y=(news20$y)[1:5000], x=news20$x[1:5000, ])
+
+# acquire the data and labels
+x <- news20.part$x
+y <- news20.part$y
+
+# the dataset has pretty high dimensionality
+print(ncol(x))
+
+# prepare vectors for saving evaluation times
+lens <- c()
+gmum_train_times <- c()
+e_train_times <- c()
+gmum_test_times <- c()
+e_test_times <- c()
+
+# run the main loop
+for (p in seq(0.01, 0.3, 0.01)){
+  # partition the data
+  train <- as.integer(createDataPartition(y, p=p, list=FALSE))
+
+  # save the number of training examples
+  lens <- c(lens, length(train))
+
+  # train the gmum.r SVM
+  train_start <- proc.time()
+  gmum_svm <- SVM(x[train],y[train], core="svmlight", kernel="linear", C=1, gamma=1)
+  gmum_train <- as.numeric((proc.time() - train_start)[3])
+
+  # calculate gmum.r training time
+  gmum_train_times <- c(gmum_train_times, gmum_train)
+
+  # train the e1071 SVM
+  train_start <- proc.time()
+  e_svm <- e1071::svm(x=x[train], y=y[train], type='C-classification', kernel="linear", cost=1, gamma=1, scale=FALSE, fitted=FALSE)
+  e_train <- as.numeric((proc.time() - train_start)[3])
+
+  # calculate e1071 training time
+  e_train_times <- c(e_train_times, e_train)
+
+  # test the gmum.r SVM
+  test_start <- proc.time()
+  gmum_pred <- predict(gmum_svm, x[-train])
+  gmum_test <- as.numeric((proc.time() - test_start)[3])
+
+  # calculate gmum.r testing time
+  gmum_test_times <- c(gmum_test_times, gmum_test)
+
+  # test e1071
+  test_start <- proc.time()
+  e_pred <- predict(e_svm, x[-train])
+  e_test <- as.numeric((proc.time() - test_start)[3])
+
+  # and calculate its testing time
+  e_test_times <- c(e_test_times, e_test)
+}
+
+# reverse the testing data, as the numbers were decreasing
+test_lens <- rep(nrow(x), length(lens)) - lens
+gmum_test_times <- rev(gmum_test_times)
+e_test_times <- rev(e_test_times)
+
+# prepare data for plotting
+library(reshape2)
+train_times <- melt(data.frame(x=lens, gmum.r=gmum_train_times, e1071=e_train_times), id='x')
+test_times <- melt(data.frame(x=test_lens, gmum.r=gmum_test_times, e1071=e_test_times), id='x')
+
+# load the plotting library
+library(ggplot2)
+
+# and use some ggplot2 magic
+ggplot(data=train_times, aes(x=x, y=value, colour=variable)) +
+  geom_line() +
+  geom_text(data=train_times[train_times$x==1452,], aes(hjust=1.1, label=variable, colour=variable)) +
+  theme(legend.position="none") +
+  xlab("samples number") +
+  ylab("time")
+
+ggplot(data=test_times, aes(x=x, y=value, colour=variable)) +
+  geom_line() +
+  geom_text(data=test_times[test_times$x==4900,], aes(hjust=1.1, label=variable, colour=variable)) +
+  theme(legend.position="none") +
+  xlab("samples number") +
+  ylab("time")
diff --git a/demo/samples/svm.transductive.R b/demo/samples/svm.transductive.R
new file mode 100644
index 00000000..37eb83f0
--- /dev/null
+++ b/demo/samples/svm.transductive.R
@@ -0,0 +1,42 @@
+#' ---
+#' title: "SVMLight transductive learning"
+#' author: ""
+#' date: ""
+#' output:
+#'  html_document:
+#'    self_contained: false
+#' ---
+
+
+# We enable different unique features, for instance transductive learning in SVMLight
+library(gmum.r)
+library(e1071)
+
+data(svm.transduction)
+
+# Prepare the data
+train.transduction <- svm.transduction$tr
+train.induction <- svm.transduction$ind
+test <- svm.transduction$test
+test$x <- train.transduction$x[11:610, ] # Saves some space :)
+
+svm.induction <- SVM(x=train.induction$x, y=train.induction$y, core="svmlight")
+svm.induction.pred <- predict(svm.induction, test$x)
+
+# We pass special labels for transductive learning
+train.transduction$y[10:12]
+
+# Train transductively; it takes longer as it uses much more data
+svm.transduction <- SVM(x=train.transduction$x, y=train.transduction$y,
+                        transductive.learning=TRUE,
+                        core="svmlight")
+svm.transduction.pred <- predict(svm.transduction, test$x)
+
+# As expected, the transductively trained model performs much better on the test data
+svm.accuracy(svm.induction.pred, test$y)
+svm.accuracy(svm.transduction.pred, test$y)
+
+# Plot the projection of the data onto the first 2 dimensions
+plot(svm.transduction, X=test$x)
+plot(svm.induction, X=test$x)
+
diff --git a/demo/svm-demo.R b/demo/svm-demo.R
new file mode 100644
index 00000000..2f928d6a
--- /dev/null
+++ b/demo/svm-demo.R
@@ -0,0 +1,46 @@
+library(gmum.r)
+library(caret)
+
+# Load a dataset; here we have provided an example
+data(svm_breast_cancer_dataset)
+ds <- svm.breastcancer.dataset
+
+# Create CV folds
+K <- 5
+
+folds <- createFolds(ds$X1, k=K)
+mean_acc <- 0
+
+# The SVM model needs to know how the labels depend on the data
+formula <- X1~.
+
+# Iterate through the folds
+for ( i in seq(1,K,1) ) {
+
+  # Get training and testing data
+  train <- ds[-folds[[i]],]
+  test <- ds[folds[[i]],]
+
+  # Train the SVM model
+  svm <- SVM(formula, train, lib="libsvm", kernel="linear", prep = "2e", C=10);
+
+  # Plot one of the SVMs using PCA
+  if (i == 1) plot(svm, mode="pca")
+
+  # Separate the labels in the test data
+  test_x <- subset(test, select = -c(X1))
+  target <- test[,"X1"]
+
+  # Predict on the test data
+  pred <- predict(svm, test_x)
+
+  # Calculate classification accuracy
+  acc <- svm.accuracy(prediction=pred, target=target)
+  mean_acc <- mean_acc + acc
+}
+
+# Display the mean accuracy
+print(sprintf("mean SVM accuracy after %i folds: %f ", K, mean_acc/K))
+
+# Print a short summary of the last trained SVM
+summary(svm)
diff --git a/doc/cec/img/cec_mouse.png b/doc/cec/img/cec_mouse.png
new file mode 100644
index 00000000..a3a106fb
Binary files /dev/null and b/doc/cec/img/cec_mouse.png differ
diff --git a/doc/cec/papers/detection-of-elliptical-shapes-via-cross-entropy.pdf b/doc/cec/papers/detection-of-elliptical-shapes-via-cross-entropy.pdf
new file mode 100644
index 00000000..61c032db
Binary files /dev/null and b/doc/cec/papers/detection-of-elliptical-shapes-via-cross-entropy.pdf differ
diff --git a/doc/cec/papers/formulas.pdf b/doc/cec/papers/formulas.pdf
new file mode 100644
index 00000000..90e1c145
Binary files /dev/null and b/doc/cec/papers/formulas.pdf differ
diff --git a/doc/cec/papers/formulas.tex b/doc/cec/papers/formulas.tex
new file mode 100644
index 00000000..205887c2
--- /dev/null
+++ b/doc/cec/papers/formulas.tex
@@ -0,0 +1,105 @@
+\documentclass[10pt,a4paper]{article}
+\usepackage[latin1]{inputenc}
+\usepackage{amsmath,amsfonts,amssymb,amsthm,dsfont}
+
+\def\F{\mathcal{F}}
+\def\G{\mathcal{G}}
+\def\I{\mathcal{I}}
+\def\y{\mathcal{\mu}}
+\def\m{m}
+\def\card{\mathrm{card}}
+
+\def\diam{\mathrm{diam}}
+
+\def\det{\mathrm{det}}
+
+\def\tr{\mathrm{tr}}
+
+\def\cov{\mathrm{cov}}
+\begin{document}
+
+
+\section{Entropy formulas}
+\begin{description}
+  \item[$\G$] the family of all normal distributions
+  \item[$\G_A$] for $A$ a proper matrix (square, symmetric, positive definite), the subfamily of $\G$ whose covariance equals $A$
+  \item[$\G_{(\cdot\I)}$] $= \bigcup_{r \in \mathbb{R} \setminus \{0\}} \G_{r\cdot\I}$
+\end{description}
+
+\begin{table}\centering
+
+\begin{tabular}{||l|l|l||} \hline \hline
+
+$\F$ & $\Sigma_{\F}(\y)$ & $H^{\times}(\y\|\F)$ \\[0.5ex]
+
+\hline \hline
+
+$\G_{\Sigma}$ & $\Sigma$ & $\frac{N}{2} \ln(2\pi)+\frac{1}{2}\tr(\Sigma^{-1}\Sigma_{\y})+\frac{1}{2}\ln \det(\Sigma)$
+
+\\[0.5ex] \hline
+
+$\G_{r\I}$ & $r\I$ &
+
+$\frac{N}{2}\ln(2\pi)+\frac{1}{2r}\tr(\Sigma_{\y})+\frac{N}{2}\ln r$ \\[0.5ex] \hline
+
+$\G_{(\cdot\I)}$ & $\frac{\tr(\Sigma_{\y})}{N} \I$ & $\frac{N}{2}\ln(2\pi e/N)+\frac{N}{2}\ln (\tr \Sigma_\y)$ \\[0.5ex] \hline
+
+$\G_{\mathrm{diag}}$ & $\mathrm{diag}(\Sigma_{\y})$ & $\frac{N}{2}\ln(2\pi e)+\frac{1}{2}\ln(\det(\mathrm{diag}(\Sigma_\y)))$ \\[0.5ex]
+
+\hline
+
+%$\G_{\det=A}$ & $(A/\det \Sigma_{\mu})^{1/N}
+
+%\Sigma_{\mu}$ & $\frac{N}{2} \ln(2\pi)+\frac{N}{2}(\det \Sigma_{\mu}/A)^{1/N}+\frac{1}{2}\ln(A)$ \\[0.5ex] \hline
+
+$\G$ & $\Sigma_{\y}$ & $\frac{N}{2}\ln(2\pi e)+\frac{1}{2}\ln \det(\Sigma_{\y})$ \\[0.5ex] \hline \hline
+
+\end{tabular}
+
+\caption{Table of cross-entropy formulas with respect to Gaussian subfamilies.}
+
+\label{tab1:cec}
+
+\end{table}
+
+
+\section{Cluster formulas}
+Assume we have a cluster $A$ with parameters $l,\m,\Sigma$. If we add a point $y$ to this cluster, we get a new cluster $A_{+y}$ with parameters given by the formulas:
+$$
+
+\begin{array}{rcl}
+
+l_{+y} & = & l+1, \\[1ex]
+
+\m_{+y} & = & \frac{l\m+y}{l+1}, \\[1ex]
+
+\Sigma_{+y} & = & \frac{l}{l+1}\left[\Sigma+\frac{1}{l+1}(\m-y)(\m-y)^T\right].
+
+\end{array}
+
+$$
+
+Now assume we subtract a point $y$ from cluster $A$; the new cluster has parameters given by the formulas:
+
+$$
+
+\begin{array}{rcl}
+
+l_{-y} & = & l-1, \\[1ex]
+
+\m_{-y} & = & \frac{l}{l-1}\m-\frac{1}{l-1} y, \\[1ex]
+
+\Sigma_{-y} & = & \frac{l}{l-1}\left[\Sigma-\frac{1}{l-1}(\m-y)(\m-y)^T\right].
+
+\end{array}
+
+$$
+
+
+
+
+
+
+\end{document}
\ No newline at end of file
diff --git a/doc/cec/papers/gaussformulas.png b/doc/cec/papers/gaussformulas.png
new file mode 100644
index 00000000..06fb5e8e
Binary files /dev/null and b/doc/cec/papers/gaussformulas.png differ
diff --git a/doc/cec/papers/hartigan.gif b/doc/cec/papers/hartigan.gif
new file mode 100644
index 00000000..a2d19af2
Binary files /dev/null and b/doc/cec/papers/hartigan.gif differ
diff --git a/doc/cec/papers/hartigan.pdf b/doc/cec/papers/hartigan.pdf
new file mode 100644
index 00000000..fd63e29c
Binary files /dev/null and b/doc/cec/papers/hartigan.pdf differ
diff --git a/doc/gng/gng.pdf b/doc/gng/gng.pdf
new file mode 100644
index 00000000..b7a98954
Binary files /dev/null and b/doc/gng/gng.pdf differ
diff --git a/doc/gng/img/ex1.png b/doc/gng/img/ex1.png
new file mode 100644
index 00000000..90fe694d
Binary files /dev/null and b/doc/gng/img/ex1.png differ
diff --git a/doc/gng/img/ex2.png b/doc/gng/img/ex2.png
new file mode 100644
index 00000000..3cfb105f
Binary files /dev/null and b/doc/gng/img/ex2.png differ
diff --git a/doc/gng/img/ex3.png b/doc/gng/img/ex3.png
new file mode 100644
index 00000000..b50931c8
Binary files /dev/null and b/doc/gng/img/ex3.png differ
diff --git a/doc/gng/img/gng_readme.png b/doc/gng/img/gng_readme.png
new file mode 100644
index 00000000..fd78680c
Binary files /dev/null and b/doc/gng/img/gng_readme.png differ
diff --git a/doc/gng/img/mnist1.png b/doc/gng/img/mnist1.png
new file mode 100644
index 00000000..e76d8737
Binary files /dev/null and b/doc/gng/img/mnist1.png differ
diff --git a/doc/gng/img/plot_speed.png b/doc/gng/img/plot_speed.png
new file mode 100644
index 00000000..0f5a2aab
Binary files /dev/null and b/doc/gng/img/plot_speed.png differ
diff --git a/doc/img/svm_readme.png b/doc/img/svm_readme.png
new file mode 100644
index 00000000..f3d830a5
Binary files /dev/null and b/doc/img/svm_readme.png differ
diff --git a/doc/svm/img/2e.png b/doc/svm/img/2e.png
new file mode 100644
index 00000000..d5db3cbc
Binary files /dev/null and b/doc/svm/img/2e.png differ
diff --git a/doc/svm/svm.pdf b/doc/svm/svm.pdf
new file mode 100644
index 00000000..a752dc3a
Binary files /dev/null and b/doc/svm/svm.pdf differ
diff --git a/inst/CITATION b/inst/CITATION
new file mode 100644
index 00000000..409ba760
--- /dev/null
+++ b/inst/CITATION
@@ -0,0 +1,44 @@
+citHeader("If you use any of the models from the gmum.R package please cite the appropriate paper (use toBibtex to show the LaTeX entries):")
+
+citEntry(entry="inProceedings",
+         year="2013",
+         isbn="978-3-319-00968-1",
+         booktitle="Proceedings of the 8th International Conference on Computer Recognition Systems CORES 2013",
+         volume="226",
+         series="Advances in Intelligent Systems and Computing",
+         doi="10.1007/978-3-319-00969-8_62",
+         title="Density Invariant Detection of Osteoporosis Using Growing Neural Gas",
+         url="http://dx.doi.org/10.1007/978-3-319-00969-8_62",
+         publisher="Springer International Publishing",
+         author="Podolak, IgorT. and Jastrzebski, StanislawK.",
+         pages="629-638",
+         textVersion="Podolak, Igor T., Jastrzebski, Stanislaw K.
(2013) Density Invariant Detection of Osteoporosis Using Growing Neural Gas.", +) + +citEntry(entry="article", +title = "Two ellipsoid Support Vector Machines ", +textVersion = "Czarnecki, Wojciech M., Tabor Jacek (2014) Two ellipsoid Support Vector Machines.", +journal = "Expert Systems with Applications ", +volume = "41", +number = "18", +pages = "8211 - 8224", +year = "2014", +issn = "0957-4174", +url = "http://www.sciencedirect.com/science/article/pii/S0957417414004138", +author = "Wojciech Marian Czarnecki and Jacek Tabor" +) + +citEntry(entry="article", +title = "Cross-entropy clustering ", +textVersion = "Tabor Jacek, Spurek Przemyslaw (2014) Cross-entropy clustering,", +journal = "Pattern Recognition ", +volume = "47", +number = "9", +pages = "3046 - 3059", +year = "2014", +issn = "0031-3203", +url = "http://www.sciencedirect.com/science/article/pii/S0031320314000764", +author = "J. Tabor and P. Spurek" +) + + diff --git a/inst/dev/datasets/cec/EllipseGauss/cluster.txt b/inst/dev/datasets/cec/EllipseGauss/cluster.txt new file mode 100644 index 00000000..6baeff13 --- /dev/null +++ b/inst/dev/datasets/cec/EllipseGauss/cluster.txt @@ -0,0 +1,3200 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 
+1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +0 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +0 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 
+2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +1 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +1 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +3 +3 +3 +3 +3 +3 +3 +3 +3 +0 +0 +0 +3 +3 +0 +3 +0 +2 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +2 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +3 +0 +3 +3 +3 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +3 +3 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +2 +0 +0 +3 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +3 +3 +0 +3 +0 +3 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +2 +3 +0 +3 +0 +0 +0 +3 +0 +0 +0 +0 +2 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +0 +2 +3 +0 +3 +3 +0 +0 +3 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +3 +0 +3 +3 +0 +0 +0 +3 +0 +3 +0 +3 +0 +0 +0 +2 +3 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +3 +3 +0 +0 +3 +3 +3 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +3 +3 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +3 +3 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +3 +3 +0 +0 +0 +3 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +3 +0 +0 +0 +3 +0 +0 +3 +0 +0 +0 +3 +3 +0 +3 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +3 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +3 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +2 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +3 +0 +2 +0 +0 +3 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +3 +0 +3 +0 +0 +0 +3 +3 +0 +0 +3 +3 +0 +0 +0 +0 +0 +0 +0 +0 +2 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +0 +0 +3 +0 +3 +3 +0 +0 +0 +0 +3 +0 +0 +3 +3 +0 +0 +2 +0 +3 +0 +0 +0 +0 
+0 +0 +3 +0 +3 +0 +0 +3 +3 +0 +0 +0 +0 +0 +0 +3 +0 +0 +0 +0 +3 +0 +3 +0 +3 +0 +0 +0 +0 +3 +0 +0 +3 +0 +2 +3 diff --git a/inst/dev/datasets/cec/EllipseGauss/dimension.txt b/inst/dev/datasets/cec/EllipseGauss/dimension.txt new file mode 100644 index 00000000..0cfbf088 --- /dev/null +++ b/inst/dev/datasets/cec/EllipseGauss/dimension.txt @@ -0,0 +1 @@ +2 diff --git a/inst/dev/datasets/cec/EllipseGauss/energy.txt b/inst/dev/datasets/cec/EllipseGauss/energy.txt new file mode 100644 index 00000000..d84743d4 --- /dev/null +++ b/inst/dev/datasets/cec/EllipseGauss/energy.txt @@ -0,0 +1 @@ +2.111841 diff --git a/inst/dev/datasets/cec/EllipseGauss/input.txt b/inst/dev/datasets/cec/EllipseGauss/input.txt new file mode 100644 index 00000000..692458b6 --- /dev/null +++ b/inst/dev/datasets/cec/EllipseGauss/input.txt @@ -0,0 +1,3200 @@ +-4.490745375326851 7.063554210723172 +-3.057383209127301 5.584077432601891 +-1.6681737065702633 4.097494908345532 +0.03917893901452835 2.3022340123684084 +0.8332382357068195 1.504216304658903 +-0.7121830900657378 3.067482118393671 +1.6090855755716287 0.658255001430017 +-1.2204342003371207 3.645630887063003 +-2.9519889731179223 5.441805045088852 +1.4992163008611457 0.706610760841381 +-2.7115118687476873 5.218228695740373 +-0.9785671722467151 3.4168227179353297 +-1.3611085079447292 3.7754519197688863 +0.8774119888353729 1.4096945910671688 +0.4935871882292319 1.8238972836931717 +-0.7195608678237679 3.0688776787019694 +-2.2815793605511887 4.799378406948666 +-3.280700723715809 5.846162510062376 +0.6570452538851803 1.6092532415692609 +-0.3812625551928961 2.7299555593540235 +-2.5405793125575005 5.0404614985352225 +0.1755474055779332 2.1434310745422036 +0.6687011978252779 1.652659920103721 +2.194295584589035 -0.032214388974508346 +2.157545810964355 0.02211493322875846 +-1.3347106517311338 3.779837048077005 +-3.4865567552073267 6.0368455813762 +-2.5458179302465456 5.012412202898538 +0.12304579241369401 2.2044105382348076 +-1.0212348207618545 3.4325143830085527 +-0.32523519001050416 2.6757157688951896 +1.5326098401963841 0.7741907700168884 +-0.24047179070514696 2.5749685898106334 +-0.3174466200838871 2.6661654166306046 +-2.8228081729869188 5.336126592773428 +0.8076481513236287 1.471579301773246 +0.4351278172971906 1.8664174431445884 +-1.0736246153770608 3.4473424548906464 +1.9911903926326413 0.2579604545239982 +-2.4311610056499666 4.85513089560399 +-0.2861349793801072 2.5926161036751894 +2.3338538893737466 -0.14754598848516487 +2.1690517399813083 0.031557785621134204 +-1.7161491766546209 4.125031797490934 +0.3343359431395065 1.9992353684691846 +-0.6288812448428085 3.0067416793298416 +-2.9274147519266727 5.457085497135985 +1.0711756775933148 1.2496294045054472 +-1.2643863389584835 3.6584441712661073 +0.9176917374099147 1.3644424387025471 +1.9786521740366474 0.23957444349649126 +1.0681395574947696 1.1593103690352282 +-2.8413740987616296 5.419668451369779 +-2.57957330202623 5.055282032119745 +-0.7268074376461651 3.100343602556961 +0.4875891193453638 1.8145986823683886 +-2.1112811196260264 4.569834409975243 +0.6264703426240621 1.6997925735904298 +-0.7984990940286387 3.2058705031456474 +-2.224832903363515 4.691477213574328 +1.4177314615548784 0.8716519404606575 +-1.3332083520554074 3.8009312186664106 +-1.953913998808354 4.403808514606104 +-0.664914776598641 3.0262678192061347 +-0.3373571551832815 2.7108751367618464 +-0.5159770383793536 2.9170644434125794 +-1.4634325323283368 3.933361094386724 +0.5782652837757536 1.6920067464471562 +-0.8625838195340274 3.267473834685362 +0.7713138265192299 
1.5289511566937346 +-0.31724296619789827 2.6842258834901283 +-0.7717445205318867 3.154644081268053 +1.4837154823525165 0.806976719492787 +-0.31340924207654025 2.6549123811807283 +1.1951760919411232 1.0601480170160507 +1.8446303935087676 0.3529366701006498 +0.5443609213344476 1.748020719908163 +0.5034397556189822 1.8013770102375397 +-2.7133310121324916 5.213391689288482 +-0.7561151172768272 3.155291481239636 +-0.9564271253204946 3.395619890124801 +0.5363438844586257 1.7765100249041479 +-1.480359329078522 3.926204629379513 +2.3766867439003536 -0.22188241979862688 +-1.028073050646896 3.463008056304168 +-0.8392856646639673 3.226060709396968 +-1.2403522763794772 3.666653054338055 +0.5235029864336396 1.7468792705903 +-0.45516313049505597 2.829513438813311 +-0.49653468697810776 2.8505475133492935 +0.9616642810152012 1.3028695737934615 +-1.5964142241032309 4.034616627768711 +1.5297363568210003 0.7170522155247019 +0.9737723038138866 1.316327180800304 +-2.1071566999608735 4.557775592911188 +0.3781275740183762 1.9689008025540065 +-0.8409744943781201 3.242529790943962 +-0.5447410081454103 2.916238368524796 +-4.499530573206747 7.089260361046966 +0.07567491216939992 2.2639662141964743 +2.3220582464998594 -0.10459142329940763 +-0.8933000489273761 3.268189759380142 +-1.6389952765169031 4.1266475521979755 +2.7111635662091635 -0.530486192543083 +-0.9596969430070554 3.4100481291128713 +-0.04255526589884889 2.3985892073962853 +-0.7983640348762013 3.1540621352049643 +0.24660903958868235 2.0396636624933553 +-1.1915763380736883 3.6041191095692917 +1.2186027173673712 1.0293229662609282 +-1.2230769232190324 3.6583576560497626 +2.827771622158057 -0.6767701368802657 +-0.7935541054677666 3.1589842119363722 +-2.784869789553039 5.283663126196673 +-1.751110589134975 4.210109193353222 +-1.6216189968007289 4.059407586484301 +0.08189554704337293 2.2428217560933588 +-1.4547107985761902 3.9180648822036317 +-1.04365720417225 3.4393117151407697 +-0.20695078818871987 2.6047994359720943 +0.06387836018002818 2.2439667981376195 +-0.9820777635781082 3.341985416558635 +-0.6574523749182847 3.0224583858255696 +-0.233826682258015 2.580058236728642 +0.6244808585138919 1.6766509642577099 +-0.6269552579737812 2.990751472414212 +-0.42495505013456025 2.7992957933587057 +-2.454413391888359 4.940458543998048 +-1.0493559050582522 3.4671495832875245 +-2.105285340696229 4.546216318359752 +-2.558137729416634 5.093714620179267 +1.6366484202547138 0.5793378089929395 +-0.33063747489311696 2.672473599468697 +0.6893183722744454 1.6301104848706383 +1.6136068908313013 0.6460963707026983 +2.1567264226859173 0.07625799928165211 +-0.512892860024418 2.8475011191539226 +-1.0213611421587971 3.4132483232588218 +-1.5984778664509725 4.0328463025722945 +-2.0674972579221067 4.5605910282574325 +-0.550436625117448 2.9312938101522055 +1.0387232467052105 1.2210509402329903 +-0.8570363660902609 3.2608687165664656 +1.4936697649406374 0.7388714930563116 +-0.9572475356330061 3.3493204950931483 +0.5884715770327147 1.6977402534024504 +-0.16337480646247077 2.5536073185144286 +-0.8991714166952722 3.327247815048425 +1.3295325924229902 0.9213258078060766 +0.1351782970055404 2.196458146552416 +3.634044397638523 -1.4981174129746564 +-1.7508870201150752 4.1785808629837575 +0.7629431235846953 1.5357378705590154 +-0.20422756226284194 2.5867293198013757 +-2.3446968364345464 4.821449651866567 +-2.548644132698975 5.089111865781213 +2.7048617418143173 -0.5484755298726283 +-0.8189128601520543 3.1493471757831966 +0.49893097941765635 1.7916190478820246 +-0.7106459129555953 
3.099669879615483 +-1.283558404380359 3.702289250319263 +-1.9432457679813673 4.381134698671541 +-1.3981504307350454 3.7984060682466643 +-0.22732434209209612 2.5465084205087045 +1.5321072491682086 0.7117203599830368 +-2.7431298918009546 5.276969476865107 +-0.9557634953688958 3.3738530883341245 +-1.5898741756598385 4.052063427396453 +0.9750507623303062 1.2999870748816829 +-0.5811327201749561 2.988248148935099 +-1.0951657611864363 3.5241732181628502 +0.2498706930380859 2.0773555524014298 +0.01524343967945646 2.303415926034676 +0.42797995098716396 1.8470399106350412 +-0.6542424897577142 3.0642314366772574 +-0.8355284013120448 3.2551764129811027 +1.9771986126666163 0.31391891300426034 +-2.4903548618825426 4.951640087034942 +1.9996495967927153 0.25327677565796947 +2.2344556854065343 -0.03558863758043973 +0.8008609131750035 1.5215724502926338 +-1.9623899800679925 4.443414342629419 +-0.3024781383503579 2.6421573533859446 +0.5008896477885723 1.8430775065768046 +0.5513114254804985 1.7624005310555209 +-0.487000957569726 2.8225956568470933 +0.8196060874270052 1.4350870520523857 +-0.30345309564387773 2.7016421540943587 +0.9245906708381055 1.3800539520216046 +1.4780515606317095 0.8386621258456541 +0.3426840972515827 1.9745976362472093 +0.6021427358794129 1.7589543663647589 +0.19622152615359678 2.114579157626122 +-0.14155337857297592 2.496860733378801 +-2.07219333916395 4.5593623619744825 +2.222362313259453 0.005375809189770209 +-0.9698861161452856 3.341420686850153 +-1.6169098685447365 4.021550923351481 +-2.421235817581816 4.850208265776457 +-0.5341255062257579 2.9323476981461454 +0.06427969942849776 2.2895880703875275 +-1.6214877543320263 4.075076196099133 +-3.193415457012381 5.704384470159818 +3.5951594251785415 -1.4503120990016085 +3.026537098473221 -0.8765532740096478 +0.25254293770860453 2.069055437072527 +2.2980815974279314 -0.14255428521015956 +-2.0454989771390633 4.516470170017827 +1.6665110172105873 0.5699440242438065 +0.49326260332447575 1.856108310516834 +-1.4779270877207193 3.8912968482102643 +2.0020804723311105 0.1927440952097239 +2.0933024355588032 0.12826611840610047 +-1.4671549815668674 3.8802305979756992 +1.9585365663350982 0.2767545148607784 +-1.0126831072025253 3.4470516742437995 +0.2891048175026764 2.0984989143187 +-2.282442678347397 4.750365774579415 +0.35125021408376195 2.0073464399025296 +-4.084451341650528 6.663562232942358 +2.1537786915813313 0.038426481577477745 +-1.112601007068487 3.497678266366191 +0.3941297951260703 1.9287380496687931 +1.187639703324795 1.0887644823694922 +1.2615486549315629 1.0030787478188985 +-0.6905695984042801 3.1394354531262296 +2.076939754619982 0.17399789482332162 +0.009656244043759754 2.336989069447827 +-1.1116631181844256 3.5149589020444556 +-0.09817539631864988 2.4413380602948282 +-1.6498172616126034 4.123781647072995 +-1.4000854642910057 3.8159033983443438 +-1.6982206273859464 4.132095018824786 +0.6661162989848539 1.6671267560781595 +-2.8397162051216216 5.360723422370231 +1.9047281289989042 0.3279440640818594 +1.483969688426295 0.7504323790598695 +2.1163067614863307 0.12097891132590588 +1.4314963205439828 0.7740933668561296 +1.040652368177933 1.1966325775512536 +0.24760985850863632 2.0727639741968362 +-0.08245501756654589 2.424865228973812 +0.17488332837911896 2.1770226055433075 +0.10435971220110846 2.225391113572431 +1.094921043194493 1.136648255380284 +-1.2192501714510093 3.641863698865438 +0.5608276002660479 1.7524094530165977 +-1.5675605565686312 3.959838855776876 +-2.5196946410600316 4.9970761420083925 +0.684905245867304 1.6246246092061107 
+-2.2164279892899548 4.689622931752977 +0.6500911024029103 1.6285665255083974 +0.028305394348400348 2.279485962359532 +0.9103420510344173 1.3773756856053505 +0.45245589873032016 1.8896480614688012 +2.4532414886084655 -0.2859867137260159 +0.31956031302175286 1.9982174916339794 +0.5838746405530231 1.730017369852784 +-0.9820950418192762 3.3761437806626455 +-3.3545913413605026 5.895654699726235 +-0.18591463331365893 2.5562036010500426 +2.728601434207868 -0.49608220506205036 +0.264471666285432 2.0615716808951214 +-1.7997686493823986 4.275817947786372 +-1.3186464100595094 3.7387366636138823 +-0.9698848177642685 3.3513628406178384 +-1.3697102722094494 3.8192056009303412 +-1.2986459282611191 3.7454807790638123 +-2.308522242902215 4.7811039437028064 +-2.0235576178418038 4.510767070254692 +-1.0214188467241057 3.4172352377228203 +-2.031451344770133 4.485048706417951 +-1.9079976577736726 4.363421101093547 +0.32400211583537214 2.006234827233507 +-2.6750445237011244 5.144098989987239 +1.400806966895208 0.865515271519588 +-3.1151961114671423 5.652247830012916 +-1.3920638006079489 3.8470912711685123 +-0.6529937482152746 2.9783571301012532 +4.296087046268787 -2.232091724192822 +-1.2765354432746436 3.6716283752265353 +-1.4571615543816656 3.9016875788328296 +-0.6963954143329223 3.0958192779505906 +0.7182096207714388 1.593445494255946 +0.6528374736213254 1.6834325019668515 +-0.836424555324409 3.229869460011282 +0.32516908149051454 2.000672641707441 +-0.7805388059749632 3.1988791463326653 +-1.142491379650955 3.5677600536069876 +-0.13008354546579615 2.4971585694175764 +-2.4779299787085782 4.9958460398235776 +0.1796071882998404 2.22285396829905 +-1.4852496524238785 3.941766099274602 +1.814277672981701 0.4296276426936032 +-0.3213595613070473 2.650967682750572 +-1.0063060379507438 3.372059500516559 +2.5947910706315143 -0.3747878762258967 +-1.891191372023354 4.347913289733806 +-0.5050437586183577 2.8608921273352075 +0.22970773468760358 2.1105612786115815 +-0.8618653263288081 3.2711885718113294 +0.575598730952225 1.6972052569144758 +0.7656017427214781 1.5476030792971316 +0.8060049334711175 1.4951300702322134 +-1.5577297957570015 3.974350261915056 +-1.778746048884965 4.262624373328906 +0.20119861576188264 2.125627871236915 +1.5981500343129218 0.6180796912691364 +0.06831185532183348 2.2607743888449443 +0.09323403345484416 2.2369339144709794 +1.661722777261395 0.5741165078414805 +-0.8717145031606908 3.2176995989603965 +-0.9471999636278634 3.309254653733527 +1.458422583431101 0.812025762295423 +0.9219721191394903 1.3457155002553804 +-1.6171268904025577 4.052385993750554 +0.7149250247356482 1.611309266364659 +0.34684759808446114 1.9723244317879138 +1.159833664429238 1.106267600441118 +-0.6778346003792126 3.0626014379971354 +1.074395819747987 1.2338402700601858 +0.7860775128497823 1.4815599802284107 +0.30018543884770515 2.002076939343025 +-2.9417919384480355 5.483202366218401 +-2.859095176794711 5.3823927589076614 +1.0949409958737326 1.2162663071621154 +-1.0840502009520847 3.4903668012490003 +-1.3524249050534063 3.7405323660699445 +-0.059982319714127985 2.382056652420183 +-1.4754561282282497 3.9185333849133532 +1.9563105441550384 0.2644473042236051 +-1.2283475500148597 3.6231248034323578 +-0.10184488808788178 2.450421823551817 +-0.1324750991786502 2.4989384188287667 +-1.3619608376106724 3.769517134454878 +2.2288074703412497 -0.012588735724215816 +0.7936746864509192 1.452288598940024 +4.21007761527517 -2.0878425562209393 +-0.3340193848581237 2.690213552467812 +0.32718308385869327 2.000320904427613 +-1.0799337240795246 
3.419403341375487 +-0.05697619027740625 2.422445232567758 +1.8862018797023177 0.3502178741101054 +0.7944638795686403 1.4541982722734677 +-0.6677232454337947 3.040700952548069 +1.380511158030144 0.8444344097513254 +-0.9159334447244383 3.3224948364614515 +-0.03388540122900413 2.341214171793944 +-0.07272019494161513 2.4291529693114366 +-1.2245450535500098 3.6174020918035894 +0.9821159228963177 1.3053408703759728 +2.6764454921033938 -0.5208812190560441 +-1.2148590670752741 3.6174996968991575 +-2.0614013142823113 4.579120405270618 +0.5316233090758067 1.7646018709822329 +-0.024277539772866397 2.3855111994945335 +-0.29120668559564766 2.6347203103864945 +-0.9357358501799475 3.3450793108462284 +-0.31401417105065316 2.6920515656109085 +-0.7204340666973856 3.1287751702507336 +-1.4430287036051648 3.857231768765784 +-1.609920652915599 4.001267987393048 +0.1483945930791007 2.1452043613765484 +1.3137772340857832 0.9825893024634849 +-0.17061023826084776 2.5135615663245283 +0.9262134085898437 1.3442056889760567 +0.7406604244163779 1.5758390589623534 +-1.5491567402048199 3.9988145805321142 +0.5477270328223622 1.7864937133452896 +-1.3364216080920956 3.756294275852328 +1.43243452450052 0.8376521788393942 +-1.4518170962845858 3.8976634379052406 +1.0096265452842148 1.2815026383797599 +-1.5157762514100825 3.9531314914171642 +-2.4666479032134845 4.948509026346505 +-1.1258673744526233 3.5066148860269903 +2.899315893036303 -0.7445129339861176 +-3.2908703443563847 5.812254141842514 +-2.8600663989257322 5.326465184069457 +2.7758310166460167 -0.5819543461898187 +-0.13625514510743636 2.512823837432745 +0.3704116024943292 1.9552935160790743 +3.1386979800902464 -1.0048713872352253 +0.6293999020847203 1.6833094376043918 +-0.27252902809987495 2.6009608827890656 +3.1290437130100446 -1.012443016846094 +0.9774431237497343 1.308529569046381 +-2.192434193158544 4.651960107298329 +1.1542944876427945 1.1053219493926532 +-0.10113603071524885 2.415203845991168 +-0.2980486945680763 2.6490384431600598 +-2.976472590279536 5.503675612551293 +-1.113437469236618 3.498006277117106 +0.051670886727356424 2.30149848871013 +-0.680206096243712 3.082729829650291 +-1.5993731759096024 4.021000791435627 +-0.01711579448643069 2.3562933087036693 +1.4458973118200062 0.8135193613863301 +0.12399423470039395 2.200140597221908 +2.1184529547746176 0.09952254644491676 +-0.9911515957593571 3.360219212045152 +2.038080578102891 0.18043412707688544 +1.6172817470297087 0.6295897662136214 +-1.3311957644952905 3.7538515588229187 +0.06774161455509758 2.268706892796457 +1.4941824070807235 0.7489228864377364 +-0.2128973250381463 2.554926501413265 +-0.6486117313278366 3.0266833481562894 +0.9644958560812447 1.324507132485119 +0.021063478774249134 2.3034091123167233 +0.3009130301681457 2.0696259224950246 +0.1714100638910262 2.1398631112929962 +-1.841875438398112 4.261531789442875 +-0.9911670961849497 3.354949590940468 +0.5457031406762916 1.7571014200560522 +-0.4459364433224604 2.79438647817445 +0.5942760704652807 1.7704930676758586 +-0.4996585535700447 2.915616110187897 +0.3952802695708501 1.9467553356907379 +0.28601900339629766 2.0297743296152606 +1.550177344417616 0.6757316567202893 +-1.7169641184923259 4.152886486789335 +0.9457319868755785 1.2915281803601872 +1.884160157489805 0.3028227539115451 +0.06434929104529258 2.293744253793431 +-1.1045111645105261 3.474870247235202 +-1.6462312232336964 4.134571889097858 +-2.7983065390540487 5.3267761944042284 +-0.41761585232505843 2.799392417243444 +0.764896956400263 1.5265629494737403 +0.3722151969254172 1.9500365610827919 
+-0.13131520988166678 2.4971334926923907 +0.061209792348460634 2.2370203336398733 +1.7429047622861347 0.4112349372825381 +1.221392331061976 1.0668539956057064 +-1.3436670692600587 3.7761114695139044 +-1.3238409571382337 3.7229539708849924 +3.6120946518470265 -1.4663397320766314 +-1.9016206198238452 4.3327227321788 +2.111891150159535 0.11864481402156679 +-0.35937168809021125 2.7073382643340373 +0.027494660322638864 2.31971654605896 +-0.6806614765120683 3.0380036524202607 +3.832008966139013 -1.718568150881365 +-2.1029496475163376 4.593054090311399 +-0.11361003064144676 2.485305689134477 +-0.027015711994722358 2.3469935175872405 +-1.1258092378767923 3.5199269651407974 +-1.9369196707900376 4.400953140197959 +-0.4009396776017473 2.7497191241017207 +-1.6058585281267876 4.024258997675845 +-0.11067736589178284 2.461000668319964 +1.714624206796993 0.5033936902577656 +0.6209608039678476 1.6977688345199833 +0.14905502781735475 2.196841054435633 +-2.309633026867755 4.839103864679488 +-1.8704306671171989 4.329841912035051 +0.3581172727825505 1.9829049006484556 +3.848312713680691 -1.6941900401987566 +-2.665618297662384 5.184040261959419 +0.46619851433966947 1.850458133406122 +-1.1702576330551697 3.575351239908849 +-1.4074465353572958 3.8027380453762345 +1.715894406416994 0.502774399863382 +-1.3195300656963953 3.7204028226099934 +-3.9323673254440297 6.535114654746462 +-1.59685598983219 4.018337985867328 +-0.39399110799556797 2.758054802323763 +0.15909037759356276 2.1631498984005852 +0.7597754375574468 1.5537340409041094 +-0.06966627096613387 2.416174303397241 +0.5374869023296585 1.7352934600849244 +-0.39480802815216104 2.7361601717468194 +0.6967657314040752 1.5736170232345603 +-2.3667195715401963 4.864444958531056 +-3.2614294345755623 5.805850463454193 +1.8696919769483202 0.3667834762231208 +-4.299259058351987 6.8914080760121585 +0.5471500193203018 1.772375438284548 +-0.30108322476410304 2.6287360544126233 +-0.344845044609855 2.7113009973034043 +-0.49649273378668 2.856953695095162 +1.6339900114339414 0.6045905096945163 +0.5312302780638019 1.7965776260234176 +2.587375761838341 -0.41084275673788007 +-0.7540195165324797 3.1594463371783394 +0.5998314060367678 1.7180708875413058 +2.1763464382108153 0.0023903906535842623 +-3.6064093940037742 6.153060821640835 +-1.6381178337957405 4.032467943574282 +-0.7967174796202035 3.2211834804149637 +-1.5841171496664752 4.027008046555084 +-3.074488491832215 5.592836033239982 +0.582613669598022 1.720207021701162 +-2.023816324120241 4.479364979482465 +1.7429726709311584 0.5457631456382765 +0.3497471872761286 1.9994681838148003 +-1.0656124415787516 3.4977517190418745 +0.48872745306404847 1.81141519415592 +-4.1055836559631596 6.687870094849799 +-1.3669727123869502 3.791029786358883 +-2.2759810036117454 4.791234358948945 +-0.9988896488476655 3.385456742932129 +-0.4329549454486518 2.814710005472233 +0.9099589218524773 1.3659185917723735 +-0.4835549283538725 2.842545603664744 +1.4173059208012913 0.8745762684064884 +-1.8477089825297908 4.3051005298666585 +0.12531708720749357 2.2036678768220472 +0.896902087556797 1.3448652136611507 +1.427806642324707 0.8546382897029847 +-0.9687528568364121 3.3541610448531 +-0.17258937294846982 2.5271635084246267 +0.7515005283205274 1.544486932797421 +0.41980603813824685 1.8890816553919276 +0.1601164363848646 2.1925306308971186 +-0.1793068501086728 2.504830751841351 +-0.6949797064822458 3.0858976846346606 +-0.6263537210571766 3.0427955796321897 +-2.0047724783686713 4.458208710983215 +-0.26166895887516844 2.6372680816273504 +1.4340116666268787 
0.8226346934223119 +-2.7944630601481184 5.272099243892235 +-0.9852210464433242 3.367542879212689 +-0.24873731275959893 2.607145075550881 +1.7189146190787095 0.5072159749412921 +-0.29821355578736053 2.6579860508317856 +-0.8303054087823065 3.2219223724588875 +0.029524820055884804 2.346298479372922 +0.952447351214919 1.3102787664029987 +4.1634838452389795 -2.050909247522363 +2.0199676095883587 0.23915861238068414 +-0.04134276122930197 2.359032722514363 +0.05952197051598951 2.2691535204286764 +-0.3040405115999862 2.690241406902222 +-0.8573780626652729 3.3195999675003436 +1.1551921549356985 1.0903602698196353 +-1.0561115912460288 3.44141355847428 +2.104307679178343 0.12817268460653963 +-0.4303410266296695 2.7829465585902446 +-0.46254922781796987 2.829067181048581 +-0.8196262134554166 3.196096691440356 +-0.089616950854665 2.4446537028799216 +0.6900838407812473 1.6311643598783863 +0.8705453892657709 1.4159124493205026 +0.7828992171964746 1.5523789269890964 +2.901738068143074 -0.7746984313345675 +0.8920441470116178 1.4130623078769289 +-1.5543565130280388 3.981380274189712 +2.301175738229262 -0.14323459381115544 +1.553393603294739 0.6271238821776899 +-2.008968654791922 4.48066816754755 +0.07048835359901545 2.225536493266004 +-0.507806990113886 2.897781325095884 +-2.674909435178346 5.14991356384154 +1.785901957432188 0.45082719412922145 +-2.986083600550951 5.537542083875384 +0.535842629570559 1.752704733441425 +-0.11506481505297267 2.4954767847918293 +-2.432099952521142 4.880799479816637 +0.6430400560358165 1.6812560934929386 +3.053385026672147 -0.9122826760369951 +-2.291540337217999 4.783941633352736 +0.20584129097445109 2.0957809878170144 +2.879375038103398 -0.686105882871106 +-1.0854921896898062 3.51575974908532 +1.0213776954721308 1.2856438251401199 +-0.3051753026365601 2.664093996051061 +1.0068287305926995 1.3075589398132506 +-1.2948064148801322 3.7165514095105783 +0.9730470820664523 1.2462597397096495 +0.9373443344751342 1.32874462924772 +1.3494626438573796 0.8811152912901588 +-2.9768174858799124 5.516336469103254 +1.693747093384431 0.5234749685040692 +-1.8630424906289753 4.344502765976677 +-1.8497854840353372 4.283670198939907 +-0.4152715022005398 2.801770299160169 +-1.4995562081390876 3.935447569110362 +-2.8799718390983347 5.350978817877806 +1.4405115781784426 0.7867751788323043 +1.9240814317592292 0.28310197638876033 +-1.8792285816250807 4.382535299741142 +1.9673747508404473 0.2479290648383503 +-1.8799515884546711 4.304065429218632 +-1.9856544152695303 4.413653048995899 +1.385126296951533 0.8291506052354762 +-2.185592816142919 4.6975194629032 +0.5606354782536729 1.7625534604542346 +-0.40593049103067114 2.780433242270785 +-0.7242414382051693 3.12408709003594 +0.06919522212701773 2.3038003877379296 +-1.052699544213644 3.434161476503606 +-1.1479955643322508 3.568519179714618 +-1.1918461077093367 3.599608696532078 +0.6615926273518882 1.6560300782467032 +-0.2667065765308873 2.6536728235571907 +1.5107309812475795 0.7489195983163262 +2.355980847287974 -0.1796710629726479 +-0.6719761246334637 3.053368041324606 +-1.1741606105111837 3.56988129879071 +-0.11830012044815644 2.464009221291468 +0.2054371561287639 2.1017271337489634 +-1.296425050698377 3.708490214275089 +-2.9504519966469087 5.480801397744466 +-0.0899156454070304 2.4138540662359937 +-2.291086137980844 4.7902612298801674 +-1.387512320515077 3.780733346063924 +-1.4298513352901263 3.8655341847426095 +-0.6597845723402085 3.058042198987757 +2.02612581367992 0.20627238755967747 +-0.3060044184769078 2.6601004410199733 +2.252484105155609 
-0.06826380623348349 +-2.634308213423866 5.152152886504482 +-0.2039792161245017 2.5814306422738937 +-0.17548610635314182 2.5066106313880123 +3.0269977760657087 -0.875722467164644 +0.19763423996948776 2.1586193060973033 +-0.9428768098319253 3.260826675698225 +0.1737164617618011 2.162413865643548 +-1.360768945166642 3.754365171015868 +-1.409377632455074 3.832749724520659 +-1.3212394823819906 3.7208536196901605 +-0.3629644919058979 2.770521240140575 +-0.6076515234050407 2.9714287259649126 +0.04689351547886583 2.3326731397303746 +1.2984195770682512 0.9595781282638651 +-1.9153676345601316 4.369653188496686 +4.642106039591176 -2.5690112720291363 +-1.1017946823164093 3.5204063878982033 +-0.6633448293036044 3.051523534031324 +1.2064168277287102 1.0649949575528783 +-0.14677480328183173 2.4883911943305788 +-1.755837967676703 4.214154360529397 +-0.4021766515457369 2.760891369307845 +-1.4098170771395009 3.812639752137879 +0.5995887431905993 1.6898264245670689 +-0.09115379367087939 2.4466948931557586 +1.1120990525896757 1.1907812324884988 +0.6791011850467148 1.6128053982183215 +-0.3340825862427275 2.7154623463561984 +-2.2185955635156773 4.647094172251929 +2.094860137235057 0.15017315560635458 +-1.1805526685672456 3.6250354839234484 +0.3550374206171167 1.9445531358054278 +1.808661966908241 0.46452703597800005 +0.4398994395430053 1.8429843869807767 +2.20768419820344 -0.01330608172722103 +-2.179623240564771 4.651346612031155 +-1.1845012951810048 3.5846806934765976 +-2.3092999517787622 4.813951877898415 +0.9270808622236131 1.2961669484253782 +-2.6026593996896494 5.076439955876831 +-0.867981788822634 3.231034408673211 +-0.9614058477966954 3.368935356562815 +-1.2112859124346127 3.6011614966592553 +-2.200170456016701 4.728319153140246 +-0.28254453666917706 2.604216173202656 +-0.7007037468641377 3.111177461405654 +-0.9992183458853441 3.392555101867697 +-1.2268485686934123 3.669493992331425 +-0.07602302962105284 2.4326902926038882 +0.3179772483581265 2.043450036061449 +0.8656074188612695 1.4098446499541937 +-2.925900261355902 5.438523133044859 +-0.022621406665126298 2.418005284222626 +1.0884667621161916 1.210435530668689 +0.07883112635266276 2.2421646078894084 +-0.6477896492333903 3.012438883874892 +1.6203389098399616 0.6120708374567836 +1.4621214512123226 0.8179871046054739 +-2.6989851655643182 5.2065812056291625 +-1.4853403145904356 3.9077296151205276 +-0.17311386542613422 2.4903989015082737 +-0.8469743696746739 3.2491611906199607 +-1.0116445766528028 3.4731688016540545 +-1.4083514813384463 3.8527474427555175 +1.3005890420057915 0.9719104142753556 +-1.0123428273600017 3.3733686342728326 +-0.6673425316625817 3.0466749053047826 +0.3297995311511838 1.9664073408455405 +-0.09022918890692816 2.4629800140682714 +0.3917577409234511 1.9316563956059754 +0.20781143737723573 2.130476497367857 +-0.26800790341400205 2.6157270237370507 +0.15553511841962547 2.2063909365413323 +0.6174154611504807 1.6722467614581147 +-0.4820451350169946 2.8156172333714675 +-0.10116561026904719 2.4688215897203136 +-0.5817875681623055 2.993294678555346 +3.5522044947960896 -1.4200123204361161 +-1.7648416457379004 4.138976270756157 +-1.7391617118667064 4.208525398295416 +-0.048396308310749564 2.3945187128372365 +-1.4423749219976776 3.901602476932058 +0.7366225981848611 1.57649380094271 +-0.7260839418468066 3.1095645223344386 +1.1202097656699983 1.1102222980271466 +-0.2792268311869393 2.622146306997208 +1.9738828393119892 0.24385447549112005 +-1.5621738827735612 4.04017677876336 +2.5052130926134835 -0.3134653915430974 +0.8740989662663481 
1.4163648790203607 +-1.2745373773185966 3.6796684029223794 +-0.5543275433613644 2.9413107581924196 +-1.5392709571987264 3.9657983319432786 +1.8549374744253357 0.3583636026790451 +1.8137637973281946 0.45452466047131646 +0.21227540962769165 2.0958336437250327 +-0.12483117272036812 2.467650911020602 +-1.2878769902258447 3.7158986363818296 +-2.182962579761624 4.680576775466873 +0.15857124734571848 2.1756149400031504 +0.8553989273948044 1.3990782573514822 +-0.9883842996944914 3.4082335827269303 +-1.9153130603288149 4.363848220859359 +-0.8839257789510919 3.3501644416947594 +2.849712701365623 -0.6973357821709238 +-0.2495115552615827 2.584079555248502 +-2.1740229772212403 4.654159737663694 +-1.9923857764360167 4.468993674214159 +-0.48682072887402 2.8223176199159603 +-1.055605754367801 3.4873434708518243 +-0.8416392300236812 3.2000899356130135 +-1.6940431082109402 4.132734235912553 +-2.4010453245047008 4.895240693791815 +-0.17059394501348474 2.551755441762709 +1.6336898721704096 0.5923528985137612 +0.9579980408745004 1.331942682288674 +1.157961848120198 1.1007083971099785 +1.5269798152135405 0.6753941882715502 +-1.645616806067299 4.086968495417827 +-3.0830249893928237 5.631180752526959 +2.758817399915434 -0.5730256555452873 +-0.17645010451980442 2.5057308043468196 +0.26655884063390295 2.068957081655909 +-0.4390898481063283 2.8317146066283496 +0.7626362039830339 1.4997740137583209 +1.0805062318489138 1.1634291316919074 +-1.6794786394724344 4.13891609384363 +0.9476934895788807 1.3556858917381835 +-1.9697546823433358 4.422339957095752 +2.5552732808803076 -0.38775258394059176 +1.4288780348499 0.7939986087484285 +-1.7448890764183658 4.167593584649076 +0.5037965904912567 1.8026604734739011 +-2.178647035183273 4.682011006715895 +-1.9292706296563724 4.4197199135927345 +0.6280369773559868 1.7149584537861573 +4.482101291479737 -2.3838673230966037 +-2.3714398229665483 4.865894823893448 +-1.2052808883959913 3.638554714615019 +2.0826517635589736 0.09983978045139708 +0.5378255454225718 1.7588880027439193 +2.115912945080999 0.06110394821115639 +1.290776595962705 0.9644664167809573 +-2.2629684670026418 4.799260701557827 +-1.1851857557183159 3.590208395661945 +-2.9065924733116018 5.367500193468183 +0.9431900400778208 1.3458447359672867 +-0.8496729423780349 3.201896551173063 +1.2101030066599427 1.0182046363984125 +0.49287927955362437 1.8290545646699967 +0.9891898671286019 1.312942091371025 +0.5386675909260741 1.8125428067994127 +-1.9824050970328073 4.431753154222083 +-3.136679959571072 5.664532408989867 +-0.24060157586981296 2.5744930516588096 +-0.026113971896217114 2.3622779445871362 +0.8061178607927031 1.523003263631672 +-4.337723131071974 6.922076313690926 +-1.6893794203014936 4.162456478087956 +0.9054092804897158 1.326755498833598 +-0.7866923857976815 3.1724968379187444 +0.788912896165844 1.5195412461597078 +-2.0728772588592648 4.551202001211114 +1.7378625928653986 0.5257319844500721 +0.8070026505776151 1.4670695940314702 +-1.8439081205008614 4.299614480516672 +-1.7877183425011764 4.215198628803309 +-1.6965468859879622 4.103143772222801 +1.2519955319489096 1.0143204609588263 +-0.040169299630061966 2.3872216109440907 +-5.126926387294877 7.783512431264027 +-3.0046162113193042 5.523794918633746 +0.3592679474548774 1.97373963096233 +1.5312193318390823 0.655438045550057 +-0.9517080396790507 3.371120201525135 +-1.7006736614443694 4.138937239802951 +0.7922129479081776 1.5002822250060541 +2.30553000418574 -0.10096332058546276 +-1.3986693608788616 3.826592398176839 +-1.5969220019832566 4.041936723278595 
+0.1564066669791238 2.1758352363503373 +0.61160764275932 1.705409867654949 +2.436821531026706 -0.23425905597363084 +-0.6642915177239768 3.0865438819968682 +4.549082301746721 -2.454545215367822 +0.5525692797013213 1.7624691136753114 +0.9818951761425003 1.3055668272863388 +-1.2599841225271757 3.710864200626691 +0.3913296315015519 1.9137648970535994 +0.15589622522366148 2.135175343360557 +-0.7253386233556467 3.0987869500797305 +-3.1914071295945146 5.737307994287356 +-1.2092271984649512 3.60933969426827 +-0.11168806049886079 2.4609089685702648 +0.8096690491778269 1.4828185707018442 +0.20991171006095444 2.116131632452489 +0.6209780584148943 1.6738762229970183 +-2.5736327587760877 5.091797523027926 +0.27191606818202513 2.0701497547930234 +0.9293887520479065 1.3517683782463024 +0.8851075443653176 1.3800981797164287 +0.6282708491614437 1.6832070172743367 +-0.014987177099643534 2.3523797642752418 +1.3649967999979322 0.8916248174901247 +-2.4415374125983673 4.913528363991658 +0.9438262335239238 1.3803349710290163 +-0.8809567280511978 3.2811934432415524 +-1.2424850642404912 3.6297535933717184 +-2.937629429489433 5.423349500774176 +1.4248854665108153 0.8273904386736244 +2.935467176290021 -0.7454077469507676 +2.3332172897495873 -0.15975736168849997 +0.4464332139045056 1.861988717515442 +-3.0807396582094184 5.586412221825297 +-1.0063763579699847 3.4084682117329725 +-0.5829950996453834 2.992909211057019 +-2.739156383861815 5.2272571609013285 +1.074326852457615 1.208529618613852 +1.9463612216913024 0.27531679723557145 +-2.641549312048469 5.150917833067867 +3.9351925717180145 -1.7953873419744384 +-1.6960275723255964 4.122302738344738 +-0.06751915369743539 2.491340258107619 +2.0746636772891645 0.13400169616642854 +-0.06014666757932843 2.4165088921005426 +0.352512652883701 2.0014264704485187 +1.4522445058503703 0.7633539996858976 +2.271238393461786 -0.06499348589150955 +-0.882713845636707 3.313939724229035 +2.2791827710006465 -0.09888487922717415 +-3.340617303001708 5.908534552178623 +-3.0422864664429627 5.536634339011079 +-1.858299693520983 4.3081427339470295 +0.6644132474401228 1.6624980507380838 +-1.106579328673741 3.538043130511025 +-2.9549152406097403 5.4643123280264625 +0.7553153861643909 1.544418460927789 +-1.9594402446592034 4.424569429138024 +-1.2739790300069338 3.6914605412080315 +0.987291468450264 1.3016484625285132 +0.6194928038007586 1.6809534725306927 +-1.6480174723271477 4.06551015118293 +2.7484097982064037 -0.6071851788394782 +-0.37488636558557353 2.721672571126765 +-1.1964853140580027 3.63461104591961 +-0.2539858251084324 2.623433665425688 +-0.10668196137169578 2.4790232602789883 +1.642418685259001 0.6055513973690743 +1.8067635002098117 0.4203842763800689 +-2.1770398379836426 4.64048060809893 +2.128883434485906 0.12519263665875924 +-0.45964669644370665 2.8563319244520566 +1.973324673710534 0.28276687351726704 +0.23203035615972295 2.0505474453036756 +1.4891384287204343 0.7780057761770554 +-0.737402813124123 3.1452231351743034 +-2.0955438399827235 4.587721701337906 +-1.3346355053769303 3.785708291544534 +-0.7314096098783727 3.0510073813849936 +-1.2504780892149556 3.681008433249884 +0.6735870110780882 1.6366629314910632 +-1.57658516657824 4.009614204790353 +-1.799610945568096 4.2291588709105 +1.128697688029386 1.1101633068918835 +-0.940162087747006 3.3120003230586486 +0.8793950700154309 1.4599999889752393 +-0.8332057413952518 3.228325572308781 +-0.5282705773512064 2.8847670274938535 +-2.3074295511791116 4.7468038012602 +-0.3712881217447814 2.7602617072712534 +-1.2863128055866984 
3.679594435416709 +0.49737228629195424 1.8588014646024547 +-0.7815519284078241 3.1856580652770603 +-1.2636247085379324 3.66671340454428 +-0.7137590520759625 3.086433371267733 +0.9254251424742013 1.3901187714955634 +-0.9791210886519184 3.407575788354074 +0.21859054848794823 2.0869289316191377 +-1.739270896257427 4.188928299144385 +0.24384826497222972 2.0733839689405924 +-0.3155580530021592 2.667180766944235 +-0.166910455018948 2.5193263680092683 +-0.5802019062191901 2.928179163391366 +-0.8232053562112349 3.174635335570784 +1.3863711474135902 0.8361326343862547 +1.9548486908939517 0.28126845957176405 +1.0981808172365966 1.1551310593018675 +-0.2093742769085905 2.552287515989816 +-2.6008138102409633 5.10339099626094 +1.5275923003153247 0.7033181694197024 +1.1242902415023177 1.1182187372832084 +-1.0051082303512582 3.4111019991386033 +1.9754016561805108 0.2455118297938088 +-4.305850234998905 6.920713363723053 +-2.2530445628727596 4.739686744312641 +-0.6427950698843277 2.9941759392015115 +1.7915228142616846 0.41878364737705187 +-1.5875738763857947 4.009077749232148 +-0.7101844936947519 3.1132384718722546 +-0.3400694673529811 2.671522842776991 +-0.591961069678894 2.993027665320093 +-0.4166952836464351 2.8248628551031993 +1.0665935415479975 1.1731551675050602 +0.31463717789124007 2.021395927677267 +1.0269887277687044 1.2817904237374556 +2.0163686943113635 0.19390645925811523 +1.6882846655459622 0.5232253390005597 +0.7408723876953771 1.5763469335673466 +1.1077757336154932 1.1573621425058724 +0.4803578550438952 1.8128602279464996 +-0.8488453222810504 3.2474003372342146 +-1.8354803240349888 4.293365427401966 +-5.930314079500529 8.642521168110985 +-2.784641032218242 5.256251035469402 +-3.379724626276299 5.95473938575827 +1.2650224312478253 1.0293881388354535 +-1.7819474212652031 4.19973395526792 +1.237732283878095 1.0133865747089708 +0.38559501688755765 1.8930085778335135 +-1.9439304418285217 4.422720228529229 +0.2696158670744863 2.070518049757248 +-2.3427420225918816 4.821231929207831 +0.3725940199519557 1.9791098305283295 +-0.08806682939000182 2.388003428095201 +0.5012593170009008 1.8193233497335388 +-1.577821675754733 3.9859953807659516 +-0.3108714460327192 2.6676246633234615 +1.3906421961911317 0.8461626455430387 +-0.8835718038508034 3.2443894892603735 +-2.1915153261800686 4.676294030610167 +-1.435049726204421 3.848114666319173 +-2.1209350771504196 4.600546329010291 +-0.24310652316210773 2.5585754394248483 +-1.145368421364332 3.5228040049003813 +1.8893019942320228 0.31272836707884766 +1.179348435750463 1.092833916725413 +-1.4717192442013836 3.9378470454410928 +-1.2504020909419071 3.67760806134463 +0.21871007767769296 2.145463580713951 +-2.1742311589871592 4.669708875414727 +-2.9387498546855633 5.4095334513599 +2.3894947931028883 -0.2119067593501982 +-1.201164745148399 3.628518252692656 +-3.78103283524205 6.386292733087648 +1.1985640532873176 1.0387594654374548 +0.24701998252439 2.0418534950735117 +-1.2521813513182771 3.6899240830863116 +-1.7599003065091436 4.212720413363201 +3.791405019978811 -1.6696340850185787 +2.019707928739118 0.1716820528688534 +0.5837307753339915 1.7289949804944862 +2.0386206607779394 0.20080697191148378 +-3.1731755078906865 5.7032634337949855 +-0.5566979052684029 2.9138451529362186 +-1.0418327594980474 3.480198290390128 +-0.3619534633504889 2.7600037131469897 +0.07031382503994171 2.321944783427985 +-1.4995627693433407 3.9670127342576533 +-1.617363176457031 4.017362422294109 +-1.0865135697541117 3.4787556444526944 +3.0791400969916425 -0.885632581706318 +1.02317986974566 
1.2496362160565968 +2.4869085065830396 -0.28874672417047265 +-1.0772679913200687 3.4909821182343026 +0.6587326589549527 1.639030353743068 +-2.9001347545434704 5.436066774920231 +-1.1881218070997084 3.600530336597215 +-0.7844609329293162 3.1546144481631972 +0.6444061074966585 1.6496621259153699 +-0.8279041288142295 3.1585060089185517 +-1.3269540811966394 3.7580622236249535 +-1.4300135496525939 3.8622835360891057 +-0.981587436204576 3.3535626144945505 +2.0941290853670242 0.11004735999562731 +0.12918303632268008 2.175942922008978 +2.0726775610818655 0.1634550848650651 +-0.7686342974304785 3.138225611315943 +-2.6659632881331254 5.16010549845698 +-1.5137871686724087 3.962021362808037 +0.6730270691779263 1.6317291384647126 +1.9976434150272275 0.19643677016769256 +0.38434436232902575 1.961602600067279 +-0.2962687787126009 2.6151540637545203 +-1.2868294387628887 3.7281799996503597 +-1.1188692345184286 3.550802517148801 +0.9593415884922334 1.3049175617675126 +1.0234325827614983 1.2380826152349464 +2.374506551649316 0.018103473497210976 +0.621014982943465 0.9345752477612941 +-0.11209854056441615 1.2161071025156769 +0.5310719488733067 1.1014009136480896 +0.08345834470340652 1.2029625895289582 +1.3150711651879725 0.6134198768461167 +-0.6008909657163579 1.5545219469421374 +2.1739083255757254 0.15988481047267367 +1.5373794231100912 0.5510830620996472 +1.4718320397158982 0.5398619246948785 +2.766182586333927 -0.11475569925146722 +0.3948725310924499 1.017847031650161 +2.222500938328168 0.19907267154065833 +1.7871745453884333 0.3740815552792508 +1.822063388347702 0.38223062197155516 +0.17018962093245293 1.1504150829289324 +1.4531755484625957 0.6019375350512945 +0.8724691095625029 0.8337262944734396 +3.481405999891348 -0.3577448910231902 +2.6599797451462406 -0.023698663305729895 +2.1359347547631913 0.16718111684843812 +1.207786728945624 0.7083947467152425 +2.636349487733374 -0.015424896300377688 +1.1177004649759055 0.6066437617167898 +1.6741778871398663 0.40764874270631796 +0.8021286212576871 0.8545686945585653 +3.2824176941628753 -0.32327933811773224 +0.7187913104592509 0.9521716591378386 +1.2777419645447263 0.6476594204265844 +1.379059154937492 0.48282774086857577 +1.0019594716123557 0.8055854157064165 +2.104900778428121 0.22034501155710767 +3.2100659801057874 -0.2577201343623702 +2.3281669593515506 0.1402788392617363 +1.9408968563486388 0.2713143752541723 +0.7333400604532009 0.8197311353224765 +1.8781143106718168 0.3150355812185135 +1.4884777964800735 0.6035346683442819 +2.380508041417827 0.07753699842132339 +2.686338071579142 -0.09656125706062513 +1.6629593391529562 0.48895119790948965 +1.0431268716060167 0.7097067546374661 +1.9505550465771864 0.39852865171216195 +0.29644936276179723 1.1455448883497903 +1.3946720382116935 0.5312949164734311 +-0.19612936425146965 1.353394943174845 +3.475433485261891 -0.4835537953969278 +0.7139174832268355 0.9008681439295245 +1.411168597673294 0.5647936296187255 +2.0956452796575027 0.3350711316167897 +2.809636795833977 -0.07517305838364274 +1.0814346912863622 0.66115825852288 +0.6031469025095343 1.0077820577702687 +2.3761154270997897 0.06754342904499233 +1.5784524918148153 0.4279627141312813 +1.996333441378967 0.3102267557429631 +2.3582294194810833 0.09604759296203996 +2.094244899464827 0.2256223367597921 +2.6268298297463177 -0.06735302947791089 +1.3830919916440498 0.6529372797657453 +0.9442176778194673 0.7934250075205158 +0.14962619605963856 1.0781656439490839 +0.2968257655683353 1.0173540053665762 +1.9684976850388702 0.3122375416597717 +1.2123198921653409 
0.6654962342597951 +0.9660957406042873 0.8065995269272391 +0.9266613159849721 0.7654000659518092 +0.815969447086329 0.8800254728137655 +2.3492435919001604 0.15467542947539153 +-0.9932675419074803 1.5810734611387327 +-0.10248785041768183 1.316377789383886 +2.9082163621887407 -0.03362529034653827 +1.307216358749155 0.6177047601108887 +1.0068440814775284 0.7490129648028881 +1.745177983670848 0.42935853401168483 +2.9767148587646157 -0.19852696369515377 +-0.3839028368455777 1.4292644150842324 +1.8691526190962249 0.3128183552485799 +2.5891231930074023 0.036887136601786885 +1.4434491294572265 0.6200491431895094 +1.3053442185446122 0.6875535978227302 +1.566386254613099 0.5193172673428682 +1.3341202079364751 0.5307338967228477 +0.9725424769089375 0.6384365145105677 +0.23547812685342873 1.1670289401510388 +1.0305638562856279 0.6790360697449966 +0.5608641479653231 0.9382537105849071 +1.4580350532263686 0.5895942746603039 +1.6122694351407534 0.41581422314151245 +2.239668055751524 0.12304733106937377 +2.1525637106698787 0.23252177549171943 +2.788685606442586 -0.1168759930866543 +0.34536579723194016 1.0697586033765931 +2.1100513739943487 0.11448245558926995 +0.9265389676865574 0.7416551695227593 +2.217637326924825 0.1349355682822122 +1.8385989976233208 0.3393065284660909 +1.2503282488223761 0.6541497948213875 +3.323521014732836 -0.387399359961226 +0.6760962770182217 0.928883862074801 +2.1580211565414733 0.21519181947310118 +0.6931956093078888 0.9517501994655897 +1.2339096448668312 0.7624605688184339 +0.37658965318823157 1.0230861993022007 +0.17835948241786648 1.103711291805186 +2.2631113591507095 0.15768655753371447 +1.9393440068192902 0.31983092351393677 +0.03466235592696565 1.2505300929710055 +1.5147402279554418 0.488670889847244 +2.8583991539774476 -0.10159021242680388 +1.7725029927241267 0.4083652897322749 +2.7267526190119273 -0.009925506106169157 +1.0822278610378329 0.6849005698304993 +1.2828318302608095 0.5341569096682909 +0.4913205658609414 0.9658744976326754 +0.4069212906996156 1.0062832424876285 +2.2428431724922593 0.17275682290016847 +1.6058454451741628 0.47520835108481296 +2.2827231987569667 0.16260289599062056 +1.129464120628894 0.60274956572126 +2.2475929750986294 0.133440614601257 +0.30113575262331893 1.0478745772692086 +2.1484250832778082 0.16175420687471742 +1.4833781600020899 0.4287529762483664 +-0.44745062356327114 1.4744056556530012 +1.1981992169840283 0.7467832509695483 +1.503134797744968 0.5341132008514359 +2.6727341515114587 -0.01897017144036961 +1.3126263163189178 0.5934357729434123 +1.6174245584184448 0.4719779019601603 +1.8499637962728515 0.31558770031533423 +1.1168247776260625 0.6682702811430826 +0.2642070813113091 1.1411172971329022 +0.355398407839278 1.093410428898866 +0.33773681799128896 1.042288435091924 +1.0091565079394873 0.6803937484300758 +1.4339215108790355 0.5832241545681865 +3.2428741226135758 -0.26942431208691486 +1.100449400550713 0.6951406046544438 +1.7764903341318619 0.3868599671219738 +1.2719935629886459 0.581118471428278 +-0.10519064109243592 1.2374976270016265 +2.8060234769050227 -0.03626913820530797 +0.9471224288199407 0.7556812823778765 +0.23503908853119948 1.196713047426504 +0.36141974389428033 0.9426369343722409 +0.7169049710203774 0.9252214168861208 +1.8099674829428485 0.33187888147862243 +0.3486610561253414 1.0269360780368393 +1.5708116543310682 0.4749754380771258 +2.895183116757642 -0.1089244353442339 +2.497941022030507 0.09750888082786396 +1.289303956036783 0.6387260669002738 +1.521432752807894 0.6480078075086445 +0.5827142210790346 0.845453251859647 
+1.0727913084585659 0.7656672757847175 +1.083547689436807 0.6775820195672784 +1.2942121055756681 0.5287138175534971 +2.11363599068686 0.2813535667952968 +0.0035837958158060346 1.2789800254636539 +2.016643719601172 0.3157284877104939 +0.5299815639781548 0.9277283500089997 +0.18798394643523508 1.1467851638051825 +1.4684089649565364 0.5096728430919875 +0.24963336193350028 1.1100906270118889 +1.6725061713188307 0.4626436147895151 +3.964501562393261 -0.592826307673592 +-0.2460647703773653 1.2721473570399575 +0.842840554073257 0.7687421444381716 +1.4785735985473343 0.46466931978368237 +0.6995539006325778 0.9594818955378853 +0.2974510753577304 1.1316813000617891 +-0.04121699371088661 1.2426086398664546 +-0.30173891311288537 1.4738157222379864 +0.37921722753909504 0.9242161966487223 +1.5487741968150746 0.5131314932051736 +2.1474674480855676 0.17548555924133274 +1.1138276976363715 0.6369078727987515 +-0.27208859065447255 1.3531157323002043 +0.15912200230914064 1.1725292872845614 +-0.5530130881459916 1.4040419870271426 +0.23269863323334872 1.2360529779039793 +1.5084707829156567 0.45037721296292893 +1.980162419242961 0.258548521401927 +1.7627228697084107 0.37167061210859165 +0.9320646655717009 0.7229660671172107 +2.461938256767675 0.04154415496463382 +1.667309829830075 0.48888275859041597 +0.6533991511366766 0.8623812379363764 +0.8500744133699274 0.7132316004613424 +1.0325223140050706 0.6611712875953817 +1.1206280535060027 0.7280576495831137 +1.842233233343233 0.23188773062721935 +0.1884333351383105 1.201382239459921 +1.4427905270873844 0.5350747889670617 +3.3349837852614757 -0.36116607853121563 +0.5623262936467566 0.984320575392454 +1.6294404017532307 0.4617848232699313 +3.179599149934185 -0.366576671418841 +0.36560788994396254 1.0331351345855724 +1.0145253665101281 0.7450923061894417 +1.981677314451637 0.29999082739599126 +0.7860963400493521 0.8278191427813442 +0.1758234138049657 1.1528462880853256 +0.9535958109396647 0.8042607500479438 +1.547648600265847 0.5005295522098624 +0.5588761884426724 0.9333379530806256 +1.090998592489065 0.7141004076111281 +1.7283009870957764 0.4411145167205325 +1.6546412055658586 0.4206433095244083 +1.7018554166408728 0.3706324885129568 +1.727317637759315 0.38066990717412064 +0.6175562829345921 0.9762166072525927 +1.2670114125481875 0.6280121681350461 +0.5332820326943994 0.9511824646498972 +0.8234552333129062 0.9237872088012264 +1.1614526880599962 0.733037928082563 +2.0954898018109205 0.17827391519879782 +-0.35058837090270845 1.4113732858756944 +0.04427906127195791 1.3012410575002584 +0.8286871755551821 0.801079497173184 +1.1180284727179535 0.7020855624266442 +1.7313466620352909 0.3981786637799877 +3.2657202466275166 -0.3524477404071096 +1.2564573599515574 0.5571023777323253 +0.29940643738596284 1.0755180120914738 +0.6057662708433164 0.9381315482452718 +0.7982249853011361 0.8269840581983106 +1.2298014073541412 0.5591834984366 +2.1386806723247753 0.18875398162301382 +1.7298103977925283 0.49948053359456335 +1.0684360600091865 0.7185904348867417 +2.1925677549386666 0.15522038006714622 +1.1031006613715368 0.7095789904317511 +1.0805023682726982 0.7063032069416975 +1.5413377908874437 0.4541544058930869 +0.4669597206181315 0.9424392064167872 +0.2710478845258897 1.072284404709876 +1.392058982807826 0.5291190536329184 +-0.7511692466337692 1.620143398300507 +1.122514715375592 0.5546516047267632 +1.9396728784258221 0.3380248908426382 +0.9928356833702214 0.7681366774998255 +1.5237301754675925 0.4733430913914777 +1.7727798425856287 0.39751772411798625 +2.6119715129934464 
0.045606323512004665 +1.0071732578870192 0.6267220263450198 +1.468914125521494 0.5438143727267246 +1.0984885619965166 0.6980878289257209 +2.2079982840377443 0.15856209532602167 +1.7646489980849798 0.30040723660871843 +2.068804193662173 0.22454344486167588 +1.9105978671154167 0.3217286874595422 +1.8310170861292885 0.40560916779734646 +1.534216448345185 0.5483981356526488 +0.36159441896575384 1.072626215462609 +2.079407124203234 0.23082168617547444 +1.043553223901894 0.7236400645989907 +2.2765627149550434 0.0667374642416711 +0.8957504738453397 0.8137872324885325 +2.1939224699973723 0.2229062497216513 +1.8566207311539964 0.30620857985544575 +1.4145257365840278 0.5528622368199407 +1.8140778033607106 0.3831510954829811 +0.41669294533526935 1.0744427658938096 +0.6158307923557492 0.948657256673068 +0.9497887977511223 0.8388779780202003 +1.4427272442798178 0.5994203772225924 +0.6756418743610089 0.8307267485957659 +1.8658385652228644 0.348971853920766 +-0.754464916398802 1.470529259622078 +2.066820388469643 0.3133878271977399 +0.5426127126409377 0.961644553996738 +1.8283202250996835 0.22824421083663998 +2.171449239732154 0.15618369638692026 +2.079988397431356 0.28729905566660435 +0.217667182942753 1.1624510712924048 +2.175318414921307 0.16155503961983586 +2.4037165321393923 0.0183679194139178 +1.0863982342838563 0.7354494371726039 +0.35012804050541546 1.0041755834831148 +0.8598233834991912 0.8674619935152159 +0.2852952197256906 1.0697230660359704 +1.7326817347837495 0.4406840944245077 +1.7109549788341556 0.43389022398359045 +0.023469233690672553 1.191091653839632 +0.4410655246456512 0.991373755224763 +0.5965817110330589 0.8818574117122606 +0.6320684548160014 0.8293505000944048 +0.7311393270965046 0.9257027960302167 +1.1576856841508276 0.6716802433767592 +1.27347687858974 0.5973398538550833 +1.5292946559325364 0.5183068426766404 +-0.11452795313720099 1.2274734398738072 +1.2511888410582137 0.6627305852751536 +2.431081482732246 0.07932046823754241 +0.814489402962427 0.7768849635450902 +1.2015016808776375 0.6610699924142178 +1.7946914522069863 0.3670589081175334 +2.2825515528756624 0.15201701484543467 +2.4346601110012305 0.06163642620847265 +0.814635792863443 0.8579888818203797 +2.1006487463590062 0.28518855350379524 +1.4715223095169183 0.5202666102600078 +-0.14775316874521938 1.3634276356522876 +-0.32070324008599593 1.477052454695181 +2.5758470085936027 -0.034012809188981086 +1.1422209880058016 0.6659506174336486 +1.126886560924294 0.649073589398727 +1.2481063953217544 0.636905148183286 +-0.8301058794667104 1.5805901565153577 +0.22941504360969467 1.0861442840479916 +2.0328743660004194 0.21377596131340754 +0.5394538138705475 0.9754833026718852 +1.9857729218196454 0.2619633731426392 +1.015070578481399 0.7329201592475749 +0.44378125217015496 0.943893200795795 +1.3751881405009145 0.5617534732095341 +2.2708580304419157 0.10419323297853966 +0.718279264841496 0.9344170722527201 +0.716438837391211 0.9311298122026412 +2.4307712245673887 0.14385594748295244 +2.235148546419618 0.25598943999222434 +-0.4153269857225943 1.4385388686602276 +2.198581579783106 0.21843313111826668 +1.9449449357992545 0.2751785011748999 +1.0306456258173342 0.7695100794418716 +1.4221839884618588 0.5237622402464597 +2.287000206239467 0.2122867232439093 +2.6389962119536214 -0.04134495322124143 +3.5375934134567286 -0.4897687581960126 +1.9213491352605294 0.31134503808479536 +2.088871060338228 0.25136372114807864 +2.3981057506282726 0.08879644095706496 +2.8858137262140104 -0.19598706690551448 +2.016231502503647 0.2774289784034108 
+2.1905307146458775 0.26190841464397435 +1.734561166455494 0.39824024299800975 +1.9873184046863532 0.27461640079355676 +2.0606885425772656 0.2613204859109542 +0.6628691377239275 1.0252229963934454 +1.0222659171507067 0.7650204567268521 +2.993030646553491 -0.2211108702428094 +0.3243130828019358 1.0152598921531584 +2.2376877253605283 0.19140724612288895 +0.4315898794297566 0.9741070885260953 +1.5397298695809707 0.5398936914666523 +1.5158610129257726 0.5296559351111025 +0.6246129521381539 0.8085171250035955 +1.7827510732069993 0.41684672068156703 +2.1979614414822577 0.12702227885958217 +0.6396877613334745 0.9710444749359965 +1.344695000664449 0.5069256092006572 +1.5004025585220668 0.46565769941629837 +1.4438226424857563 0.60002454880881 +0.28582315604834463 1.1096494510326051 +3.255613939471333 -0.2823066495879235 +0.6518168969746482 0.8303930305276438 +1.1608906541834574 0.7122499467407957 +3.1732940941135515 -0.24924531144962825 +1.059357499896032 0.7239026507353377 +0.9701512647281457 0.6853304959196795 +3.108854949188051 -0.22210290047430736 +1.0497584617449014 0.7504375530992791 +2.2378937346973644 0.20200992869291434 +1.8202220905707176 0.3101928782998988 +2.0333310129650757 0.2374597240059375 +1.122296743007391 0.7175167630515517 +2.128928650432893 0.22192242722993044 +0.014683753161531365 1.0894035586268016 +2.157546088128856 0.19066215367870903 +2.5532112205426656 -0.014693959107195398 +2.4921445814528296 0.04054752240059223 +3.3591839845684035 -0.4087869167030147 +2.350709286508005 0.06381566629496815 +1.9778151043790317 0.3359129058579866 +1.9425031931088232 0.24396372251168463 +2.1706071489832013 0.22193374843712027 +1.528287367585716 0.4794416934603646 +1.0194729248289285 0.7711225839187026 +0.39368165028337265 0.9807922688160797 +2.850271328387757 -0.10028973655862394 +2.317245137593436 0.11520626886694202 +0.9689387943651173 0.6669421713768368 +1.7350922549350023 0.44723408666410713 +1.0211502257032827 0.7950051483983224 +1.681833771392334 0.4522825850518911 +1.1520004693022488 0.7289482082761736 +1.2056485432512176 0.578991925562874 +0.8957776598251741 0.7238541649475163 +2.0758719943759116 0.21856211813107995 +0.790709777714176 0.8375889780198398 +0.4743042463495847 1.0128122691912147 +1.529132485486133 0.6356986572334026 +1.341692362853775 0.6036510712934773 +1.58988556342057 0.4829610249880504 +2.4407491807306827 0.06649632805435468 +0.9986160397458941 0.7268868401468508 +2.1854722802764788 0.13354344837723664 +1.4852493136098923 0.4865282260895646 +0.03466326867625047 1.2816058265806305 +1.007891453413809 0.7245773067449437 +0.6769933672537588 0.9504824618606993 +0.7167253209374131 0.9020012182953078 +1.442610397541278 0.5170213730599289 +2.1790644803147377 0.14675462075086698 +1.0700322728838505 0.7080390519909818 +0.557889844243882 0.8977952272214726 +1.4851082653872478 0.5125849184575559 +1.0590495440171508 0.7482477429206671 +0.11649280620878333 1.1681279681340215 +2.9409777097174588 -0.21707968601574745 +1.404659669637777 0.5810267125287553 +2.405222377735415 0.08596144180997961 +-0.09271847571999925 1.2296228453630773 +0.6044896144680355 0.931242967564375 +2.919448931360999 -0.158991027081814 +0.8242564121122719 0.8165025587167498 +0.7128682053926888 0.903997036985284 +2.310289108497673 0.0968721352627811 +0.9488978459720671 0.8409198970908727 +1.0622546990079926 0.7181916649534186 +0.5735118394791997 0.9345945558156996 +0.15708517173163306 1.1377507626784555 +2.4886084391714505 0.010035243560664875 +3.18624502989514 -0.2661963155060725 +0.3389273921056404 
[... roughly 1,800 added lines of two-column numeric data omitted here: the 2-D sample points of a CEC benchmark dataset under inst/dev/datasets/cec/ ...]
diff --git a/inst/dev/datasets/cec/bigData_1/input.txt b/inst/dev/datasets/cec/bigData_1/input.txt
new file mode 100644
index 00000000..e69de29b
+cec.ellipsegauss <- as.matrix(read.table(system.file("dev", "datasets", "cec", "EllipseGauss","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "EllipseGauss","cluster.txt", package="gmum.r"))); + +typeof(energy); +typeof(cec.ellipsegauss); +typeof(cluster); + +cec.ellipsegauss.extra=list("energy"=energy, "cluster"=cluster) + +rm("energy", "cluster") +rm(.Random.seed) +save.image("cec.ellipsegauss.RData"); + +env <- c(ls()) +rm(.Random.seed) +rm(env, list = env) + +#mouse_1 +energy <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1","energy.txt", package="gmum.r"))); +cec.mouse1 <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1","cluster.txt", package="gmum.r"))); + +typeof(energy); +typeof(cec.mouse1); +typeof(cluster); + +cec.mouse1.extra=list("energy"=energy, "cluster"=cluster) + +rm("energy", "cluster") +rm(.Random.seed) +save.image("cec.mouse1.RData"); + +env <- c(ls()) +rm(.Random.seed) +rm(env, list = env) + +#mouse_1_classic +energy <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","energy.txt", package="gmum.r"))); +cec.mouse1.classic <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","cluster.txt", package="gmum.r"))); + +aproximation <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","aproximation.txt", package="gmum.r"))); +dimension <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","dimension.txt", package="gmum.r"))); +iteration <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","iteration.txt", package="gmum.r"))); +type <- read.table(system.file("dev", "datasets", "cec", "mouse_1_classic","type.txt", package="gmum.r")); + +typeof(energy); +typeof(cec.mouse1.classic); +typeof(cluster); + +typeof(aproximation); +typeof(dimension); +typeof(iteration); +typeof(type); + +cec.mouse1.classic.extra=list("energy"=energy, "cluster"=cluster, "aproximation"=aproximation, "dimension"=dimension, "iteration"=iteration, "type"=type) + +rm(.Random.seed) +rm("energy", "cluster", "aproximation", "dimension", "iteration", "type") +save.image("cec.mouse1.classic.RData"); + +env <- c(ls()) +rm(.Random.seed) +rm(env, list = env) + +#mouse_1_spherical +energy <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","energy.txt", package="gmum.r"))); +cec.mouse1.spherical <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","cluster.txt", package="gmum.r"))); + +aproximation <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","aproximation.txt", package="gmum.r"))); +dimension <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","dimension.txt", package="gmum.r"))); +iteration <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","iteration.txt", package="gmum.r"))); +type <- read.table(system.file("dev", "datasets", "cec", "mouse_1_spherical","type.txt", package="gmum.r")); + +typeof(energy); +typeof(cec.mouse1.spherical); +typeof(cluster); + 
+typeof(aproximation); +typeof(dimension); +typeof(iteration); +typeof(type); + +cec.mouse1.spherical.extra=list("energy"=energy, "cluster"=cluster, "aproximation"=aproximation, "dimension"=dimension, "iteration"=iteration, "type"=type) + +rm("energy", "cluster", "aproximation", "dimension", "iteration", "type") +rm(.Random.seed) +save.image("cec.mouse1.spherical.RData"); + +env <- c(ls()) +rm(env, list = env) +rm(.Random.seed) + +#mouse_2_spherical +energy <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","energy.txt", package="gmum.r"))); +cec.mouse2.spherical <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","cluster.txt", package="gmum.r"))); + +aproximation <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","aproximation.txt", package="gmum.r"))); +dimension <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","dimension.txt", package="gmum.r"))); +iteration <- as.numeric(read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","iteration.txt", package="gmum.r"))); +type <- read.table(system.file("dev", "datasets", "cec", "mouse_2_spherical","type.txt", package="gmum.r")); + +typeof(energy); +typeof(cec.mouse2.spherical); +typeof(cluster); + +typeof(aproximation); +typeof(dimension); +typeof(iteration); +typeof(type); + +cec.mouse2.spherical.extra=list("energy"=energy, "cluster"=cluster, "aproximation"=aproximation, "dimension"=dimension, "iteration"=iteration, "type"=type) + +rm(.Random.seed) +rm("energy", "cluster", "aproximation", "dimension", "iteration", "type") +save.image("cec.mouse2.spherical.RData"); + +env <- c(ls()) +rm(.Random.seed) +rm(env, list = env) + +#simple_1 +energy <- as.numeric(read.table(system.file("dev", "datasets", "cec", "simple_1","energy.txt", package="gmum.r"))); +cec.simple1 <- as.matrix(read.table(system.file("dev", "datasets", "cec", "simple_1","input.txt", package="gmum.r"))); +cluster <- as.matrix(read.table(system.file("dev", "datasets", "cec", "simple_1","cluster.txt", package="gmum.r"))); + +typeof(energy); +typeof(cec.simple1); +typeof(cluster); + +cec.simple1.extra=list("energy"=energy, "cluster"=cluster) + +rm("energy", "cluster") +rm(.Random.seed) +save.image("cec.simple1.RData"); + +env <- c(ls()) +rm(env, list = env) +rm(.Random.seed) diff --git a/inst/dev/datasets/cec/mouse_1/cluster.txt b/inst/dev/datasets/cec/mouse_1/cluster.txt new file mode 100644 index 00000000..9d65fc49 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1/cluster.txt @@ -0,0 +1,5000 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 
+1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 
+1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 
+1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 
+2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 diff --git a/inst/dev/datasets/cec/mouse_1/dimension.txt 
b/inst/dev/datasets/cec/mouse_1/dimension.txt new file mode 100644 index 00000000..0cfbf088 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1/dimension.txt @@ -0,0 +1 @@ +2 diff --git a/inst/dev/datasets/cec/mouse_1/energy.txt b/inst/dev/datasets/cec/mouse_1/energy.txt new file mode 100644 index 00000000..659f0249 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1/energy.txt @@ -0,0 +1 @@ +3.236248 diff --git a/inst/dev/datasets/cec/mouse_1/input.txt b/inst/dev/datasets/cec/mouse_1/input.txt new file mode 100644 index 00000000..a293f3a6 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1/input.txt @@ -0,0 +1,5000 @@ +-0.995196703821421 -1.25496363360435 +-0.0916901426389813 0.656163962557912 +-1.87350478768349 -0.23391071241349 +-1.06695580109954 0.422948511317372 +1.40486230328679 0.607535423710942 +0.975851744413376 -0.395105763338506 +-1.70501101575792 -0.345204788260162 +0.445471781305969 1.0730222389102 +0.0106809297576547 -0.203755992464721 +0.909808088093996 0.828390284441411 +-0.00388195272535086 1.3346636435017 +-1.32951295375824 -0.0904841646552086 +0.0730158286169171 -0.871865409426391 +1.44894760102034 -0.652278308756649 +1.85684834141284 0.0889080679044127 +0.228373211808503 1.90131447277963 +-1.75790816172957 -0.0536259738728404 +-1.38306220807135 -0.868105077184737 +0.51602975372225 1.4199586706236 +-0.652850545942783 -1.54646616987884 +-1.39745481219143 1.31078724935651 +-0.95686946157366 1.48581963032484 +1.14921182766557 1.52665143925697 +-0.270986968651414 0.919610910117626 +-0.217338860966265 0.791304524056613 +-0.892032641917467 0.77473118621856 +1.06889025401324 0.288098360411823 +0.756842515431345 -0.130028023384511 +-0.884050730615854 1.76085028983653 +1.19070654828101 -0.312924808822572 +-0.825411354191601 1.04663596767932 +-0.107251292094588 1.3665751805529 +-0.843559680506587 0.425690008327365 +-1.45229449961334 0.582208413630724 +-0.754434091039002 0.366450735367835 +-0.421628248877823 -0.88450216781348 +0.400831395760179 1.23770162742585 +-0.633702482096851 -1.41144377551973 +-1.96956814266741 -0.244305152446032 +0.079180970788002 0.802811151370406 +0.778107046149671 1.32332514133304 +1.38102038670331 0.0967990374192595 +-0.157708936370909 -1.97173329349607 +-0.52260175626725 1.40593278408051 +-0.122337413020432 -1.82479790877551 +-0.0162488222122192 -0.141661988571286 +-1.5559622310102 0.952101739123464 +1.38134856242687 1.3500941041857 +0.816561377607286 -0.502152055501938 +0.209855956025422 1.40025302395225 +-0.388177442364395 0.610096707940102 +0.230023956857622 0.737850987352431 +0.427246564999223 -0.0185639401897788 +-1.55559487640858 0.693760653957725 +1.8553197029978 -0.411760785616934 +-1.09694796986878 0.357101268135011 +0.956102918833494 0.409820253960788 +0.746620911173522 -0.399177510291338 +1.25174430012703 1.50120619684458 +-0.686867597512901 0.821555423550308 +0.910813552327454 0.482326836325228 +-0.623715681023896 -1.78861341439188 +-0.982816434465349 0.195791008882225 +0.789584302343428 0.00602813716977835 +1.13683789223433 1.20819513406605 +1.08733913302422 1.57471813168377 +-1.40271726064384 -1.11607821658254 +-0.00301389396190643 -1.09967013075948 +0.637796778231859 -1.62535374797881 +-0.239783311262727 1.11461380310357 +-1.11420706938952 -0.413595991209149 +-1.0506158573553 -1.5809051701799 +-1.82369457092136 -0.363368852995336 +0.437365681864321 1.02388636767864 +0.802473210729659 -1.01332930661738 +-0.11170787923038 -1.79956202860922 +-0.425342719070613 0.506350408308208 +1.33317080978304 -0.781724327243865 +0.382948217913508 
-0.479888954199851 +0.791917050257325 -0.0627150880172849 +-1.38120798114687 -1.28443724475801 +1.86881581507623 0.31946060154587 +-1.57915702648461 1.08622116968036 +0.993631829507649 -0.986970183439553 +-1.78958654217422 -0.102068017236888 +0.492273228242993 0.197045360691845 +-0.919379997067153 -0.0445720152929425 +-1.43366822879761 -1.2417916180566 +-1.28264018241316 -0.613676596432924 +-0.933348114602268 -0.104606903158128 +0.233941570855677 1.83359000273049 +0.565767897292972 0.311423322185874 +-0.968481618911028 1.01552596874535 +-1.10097881872207 -0.188604992814362 +1.08189496956766 -1.2427762048319 +0.189538032747805 -0.751524673774838 +1.02990228775889 -1.61914482712746 +-1.81482284143567 0.582876118831336 +-1.60509898420423 1.12917514983565 +1.08903963677585 -0.257311060093343 +-0.666572652757168 1.05757307447493 +-0.48484595771879 1.84037724137306 +-1.81513813883066 0.601336386054754 +0.0582790169864893 -0.562858243472874 +1.84193670004606 -0.503282655030489 +-1.13988032564521 -0.767220958136022 +-0.193256568163633 1.39126750081778 +0.661288068629801 0.173537191934884 +0.354036150500178 -0.81276538874954 +0.847812531515956 -1.76293712668121 +0.404423064552248 -0.225443207658827 +-1.52435009088367 -0.512567889876664 +-1.86395110562444 0.0639329142868519 +-1.44284111820161 -0.682709399610758 +0.524419479072094 -1.44421134516597 +0.928834730759263 0.288960594683886 +1.33634047303349 0.14220851007849 +0.0102299572899938 1.0648346748203 +1.57436500675976 -0.898613854311407 +-0.386278164573014 0.748897041194141 +0.650066828355193 1.60764280427247 +0.303726587444544 1.61144728492945 +1.59307530988008 -0.556245924904943 +0.475288433022797 1.59680422954261 +-0.695872453972697 1.78318608272821 +-0.287042141892016 1.82167880982161 +-0.386644029058516 0.162014314904809 +0.0326062366366386 -0.997294722124934 +1.55032970570028 -0.147106533870101 +0.950011108070612 -0.654733343981206 +-0.478762791492045 0.0729549694806337 +-0.0482001826167107 0.426158153451979 +0.0337188029661775 0.647792154923081 +-0.955945864319801 -0.270569618791342 +1.26062777452171 1.12391301244497 +0.592039856128395 1.72190866991878 +-0.746014905162156 1.45152712892741 +-0.915095649659634 1.11649783607572 +1.37725456431508 1.37886827811599 +0.3813297720626 -0.335157519206405 +0.278439777903259 0.735816269181669 +0.632945967838168 1.43321306072176 +1.06632588710636 1.36262670625001 +-0.395814121700823 1.93987057358027 +1.44985753111541 -0.599436345510185 +-0.746244986541569 0.635717991739511 +0.544959125109017 -0.168480843305588 +1.82909441459924 -0.477092448621988 +-1.3607116760686 -0.0300543326884508 +-0.473697507753968 0.402692710980773 +-1.07549113873392 -0.481608929112554 +0.0329718217253685 0.584145698696375 +-1.76141790766269 0.736522631719708 +1.15807581972331 -1.07025272957981 +0.506290155462921 1.93152305483818 +-0.245768368244171 -0.0922211930155754 +0.677772856317461 -1.48230097163469 +1.73064559232444 -0.644862728193402 +0.483068946748972 -1.89054376259446 +0.110996737144887 0.0135907810181379 +-1.70756212901324 0.401969030499458 +0.859525134786963 0.888976230286062 +0.97934581246227 -1.56658195983618 +-1.40966855175793 -0.890993287786841 +-1.35241699405015 -1.01378951594234 +1.5667303679511 0.701280384324491 +-0.343117402866483 1.93737894203514 +0.747848264873028 -0.690802177414298 +-0.931067007593811 0.406063173897564 +-0.0643969168886542 -0.834923907183111 +0.925833085551858 -1.7332753604278 +0.796140344813466 -1.3549906052649 +1.04852620791644 -0.0854982296004891 +0.870518087409437 -0.0593339577317238 
+0.730402863584459 -0.681072719395161 +-1.13711747899652 0.909050683490932 +-1.51473970897496 0.0536620626226068 +1.16254081577063 -1.31728076003492 +0.605905718170106 1.47187018580735 +-0.732117407955229 1.86034337431192 +-0.852686068974435 1.72361632715911 +0.187512298114598 -0.355671167373657 +0.0766679644584656 -0.0144426841288805 +-0.783835278823972 1.03130060061812 +0.363441699184477 0.432291424833238 +0.91246041841805 1.03076292201877 +-0.874773055315018 -1.14095608890057 +0.631160539574921 -0.74690970312804 +1.58490722253919 -0.397608121857047 +-0.406172771938145 -1.52734799031168 +-0.99266345705837 0.0426328163594007 +-1.33452335558832 -0.243535051122308 +-0.475532999262214 -0.643190966919065 +-1.05590285919607 -0.544152981601655 +-1.55349085200578 -1.0807415228337 +0.133797239512205 1.15950576402247 +1.06400329619646 -0.0136996787041426 +1.36356160230935 -0.760975082404912 +-0.825202164240181 1.31515564210713 +-0.35720138810575 -1.38241032138467 +0.000469357706606388 -1.02691410295665 +0.706645418889821 1.31712957564741 +-0.497425246052444 -1.84492974448949 +-1.47593727521598 1.21896158251911 +0.761808474548161 -0.187922099605203 +-0.629103234969079 -1.38337221834809 +1.22742733824998 -0.732372306287289 +0.159486372023821 0.749897128902376 +0.356571141630411 -0.451895751990378 +1.52863913495094 0.380299499258399 +1.22944219876081 0.965759473852813 +1.20261873491108 0.444869204424322 +0.355163857340813 -1.51963828969747 +-0.903908772394061 -0.614233963191509 +0.150213165208697 1.76738443225622 +0.576563728041947 1.28240336664021 +-1.20272892341018 0.7913110377267 +0.34545902069658 1.60247808042914 +1.86041165143251 0.38595465477556 +-0.579973080195487 -1.02055521868169 +0.915170822292566 0.276456887833774 +-0.982072037644684 1.00495856627822 +0.913146806880832 0.37110261246562 +-0.414419451728463 1.73020204808563 +1.30078400205821 -0.709462811239064 +1.15081885457039 -0.0685763228684664 +-1.45178655348718 0.582299673929811 +0.0227826042100787 0.995403866283596 +0.658904138952494 0.202628853730857 +0.0609382968395948 1.32589694764465 +-0.844787294976413 -0.173397280275822 +1.82142582442611 -0.352384633384645 +-0.315357865765691 0.885712916962802 +0.0869441032409668 1.72215619683266 +-0.259702879935503 0.418586051091552 +0.853420538827777 0.0864836778491735 +1.60403865016997 -1.07754975836724 +1.42175541352481 0.324495150707662 +-1.06668650731444 1.22129549831152 +0.4544298350811 0.192171584814787 +0.600602254271507 0.401547293178737 +-0.463377069681883 -0.755990738980472 +-0.283635885454714 -0.472287621349096 +-0.194521699100733 0.340083898045123 +1.04017546027899 1.00489298813045 +0.504722838290036 -0.678866997361183 +-1.08173288311809 -0.0458888905122876 +-0.816141354851425 0.223551651462913 +0.961343898437917 -0.717595842666924 +1.3348745200783 0.287157875485718 +-0.320920492522418 0.162271776236594 +-0.765279618091881 -1.33912028744817 +0.73038284573704 0.444527387619019 +-0.457750655710697 1.06245605554432 +-0.739184868521988 -0.572864353656769 +-1.71385939233005 0.904077436774969 +1.3088611131534 -0.577693889848888 +0.721107012592256 -0.241964974440634 +0.853931662626565 0.804113106802106 +-0.0703866695985198 0.238781863823533 +-0.275603499263525 0.759435653686523 +-0.531817354261875 1.68647861387581 +1.26718869898468 0.398671532981098 +-1.36903159599751 0.160678334534168 +-0.0309905232861638 -0.898300776258111 +-0.91533896047622 -0.975059447810054 +1.69905540905893 0.139202923513949 +-0.757232335396111 0.742849911563098 +-0.0437584705650806 -1.41829698067158 
+0.491516563110054 0.375375173054636 +-1.38399440888315 0.27180060185492 +-1.98093810304999 0.0487158698961139 +0.90674595721066 -0.00384219363331795 +-0.164458590559661 0.607807497493923 +0.716516653075814 -1.48044554982334 +-1.20881435554475 -0.747844932600856 +0.607536057941616 0.572839183732867 +-1.70509478822351 0.493106203153729 +0.750138824805617 0.506564995273948 +1.44535682629794 -1.26503456756473 +-0.83621723856777 0.301373273134232 +1.49973416142166 0.821444384753704 +0.979218731634319 1.36041244864464 +-0.128170259296894 1.85705869179219 +1.0722393207252 0.213939391076565 +0.753036223351955 1.62214121967554 +-1.64602892659605 0.518038483336568 +0.441066613420844 -0.046219184063375 +1.15532006882131 1.58225478697568 +0.591147957369685 -0.201091900467873 +1.70242683775723 0.170697254128754 +-0.422395584173501 1.65241691470146 +0.721141584217548 -0.487911244854331 +0.692002985626459 1.16408439353108 +0.63427403010428 -0.585004523396492 +0.237150836735964 1.31623688992113 +-0.667902288027108 0.670168127864599 +-0.847749352455139 -1.5959488498047 +0.587787348777056 1.67052134312689 +0.24984907079488 1.58007201179862 +0.379195890389383 0.462371625006199 +0.43553117942065 -0.172134504653513 +0.829223842360079 -1.35730029083788 +0.372082722373307 -0.695753683336079 +-1.64209248311818 0.447460251860321 +-0.393174621276557 -1.94799479749054 +1.15298049524426 0.573091526515782 +0.0682687796652317 1.63207322265953 +0.333458595909178 -1.00889765284956 +1.21066868584603 0.620449020527303 +1.79950367845595 -0.0852862540632486 +-1.96868795808405 -0.000750313512980938 +0.591576127335429 -1.66582980193198 +-1.23687919881195 -1.2849373659119 +-0.195623979903758 -0.307801141403615 +1.01727752387524 -0.35432665143162 +1.64721434190869 0.480085669085383 +-1.38395196385682 -1.18820938281715 +-0.0533513529226184 -0.667771198786795 +-0.116332152858377 -0.974136655218899 +1.08934413362294 0.543126857839525 +-0.804746301844716 1.57552240695804 +0.857653864659369 -1.32032788731158 +0.0628705322742462 -0.32741598598659 +0.346876329742372 -1.18718007206917 +0.0299704354256392 -0.699587830342352 +1.25400892086327 1.46717280708253 +-0.390970905311406 0.702818472869694 +-0.0220326576381922 0.740375788882375 +-0.0876873405650258 1.2867888584733 +-1.53994850907475 1.17058638576418 +-0.02047014888376 1.02829579263926 +1.51675029564649 -0.819550946354866 +-0.702962211333215 -1.0589279923588 +-0.113342059776187 -0.332582891918719 +0.29223770275712 -1.40752658713609 +-0.922058136202395 -1.39388046786189 +-1.797926325351 -0.522728119976819 +-1.07602418400347 0.194256991147995 +1.72138263005763 -0.539326117374003 +0.307852629572153 -1.92473651189357 +0.250246435403824 -1.93745632935315 +1.26496123708785 -0.583435721695423 +-1.41688238549978 -0.736354641616344 +-1.82111192308366 0.312675296328962 +-0.560956492088735 -1.67170381080359 +0.613825005479157 0.0629289224743843 +0.669923468492925 0.209714718163013 +-0.570493366569281 -1.50841156113893 +-0.0802411194890738 -1.12397055700421 +-1.78804942779243 -0.525498958304524 +-1.04474954772741 1.05813085380942 +0.407633641734719 -1.57522692251951 +-1.31223152950406 -0.168557421304286 +-0.414437154307961 0.047677525319159 +1.22181331925094 0.494973625987768 +-0.850262119434774 0.661039061844349 +-0.672423167154193 0.807273066602647 +-0.137605521827936 -1.04581622965634 +0.993412005715072 0.22675542999059 +1.27229198161513 1.11720008775592 +-0.220931774936616 -0.581188298761845 +-1.25141715630889 0.421482088044286 +-1.87348592095077 -0.593115178868175 +1.76890220772475 
-0.499444686807692 +-0.479615177959204 0.0940467864274979 +-1.19890473317355 -0.85904941521585 +0.351398186758161 0.764224844053388 +-1.67545071803033 0.780228747986257 +-0.495106742717326 0.0437783496454358 +-1.30949800554663 -0.212736347690225 +-1.20871382206678 1.49904653057456 +-0.553403533063829 0.165829638950527 +-0.957269532606006 1.59991280455142 +1.44047755561769 1.29077435936779 +0.321877646259964 -0.97382306586951 +-0.255085550248623 -0.87013950292021 +0.829005802981555 -0.620201685465872 +1.23389742895961 -1.41849923692644 +0.738880936987698 1.37690252438188 +-0.632375093176961 -0.916502872481942 +-0.0286113563925028 -0.154857690446079 +1.73621740657836 -0.511664731428027 +-0.916096171364188 -1.17853697109967 +0.55138814356178 -1.63894927222282 +1.77955648582429 0.907634264789522 +-0.5684330817312 1.09478843584657 +-0.752216276712716 0.964104150421917 +-0.197858620434999 0.636141213588417 +-0.727648154832423 -0.269814874045551 +-1.60112897958606 0.979748252779245 +-1.11942855734378 -0.288902945816517 +0.685074288398027 1.21277574915439 +-0.620083078742027 0.582455884665251 +0.823259767144918 -0.57748431712389 +1.76741660106927 -0.446372795850039 +0.291558029130101 -1.31178698316216 +0.308595130220056 0.694930983707309 +0.560215827077627 1.25973988883197 +0.483300342224538 -0.00472699198871851 +-0.45598610676825 0.950894927605987 +-1.60600179806352 -0.542930297553539 +-0.0876334607601166 -1.06379523687065 +0.351809175685048 -1.73245886620134 +-1.41180249862373 0.0843833386898041 +-1.57671115268022 0.0716482307761908 +1.17502567265183 0.814884508028626 +-1.53912263270468 -0.525534112937748 +-1.22338424809277 -0.427679716609418 +1.00329688470811 -0.426361828111112 +-1.29688999895006 -0.304181425832212 +-0.715584401972592 -1.8599852938205 +-0.22746594902128 0.376712105236948 +1.32915029209107 0.936184713616967 +-0.11372818145901 0.493998927995563 +0.828684665262699 -0.342130715027452 +0.868047717958689 1.49403645657003 +-0.631900665350258 0.279637476429343 +-0.482001221738756 -1.20983183570206 +-0.649166981689632 1.85354051366448 +1.43620274960995 -1.14997111167759 +-0.606179953552783 1.12266394868493 +-1.27990800607949 1.02254690602422 +-0.768104788847268 0.407318857498467 +0.542685781605542 1.01192342303693 +-0.376549375243485 -1.27918440941721 +0.261440098285675 -0.522354177199304 +-0.793426035903394 1.64805996138602 +-1.01039612013847 -1.05727389361709 +-0.826220270246267 -0.812121217139065 +-1.72349250223488 -0.819835059344769 +1.32071771472692 -0.757125933654606 +1.44636637624353 -0.559060551226139 +-0.270491316914558 0.780914107337594 +0.942553806118667 -1.61968243401498 +0.490359977819026 0.175696374848485 +-1.21363835595548 -1.50189186166972 +-1.4809550344944 0.0303114289417863 +-0.506483576260507 1.40262608323246 +1.12178872805089 -0.381912122480571 +-0.257970767095685 -0.575632422231138 +-0.00289685465395451 1.58183397259563 +0.238144697621465 1.94041347038001 +-1.41624204069376 1.29334435611963 +1.22665872517973 -1.31291010975838 +0.912436644546688 -0.75897590816021 +-0.745916584506631 -1.84381539467722 +1.11840979382396 -0.786607567220926 +0.900011729449034 1.0096855070442 +1.44663174636662 1.04091777652502 +0.544355107471347 -0.954065293073654 +0.564979068934917 1.61356099229306 +1.32590707857162 1.34049807209522 +1.5490837180987 -0.689748236909509 +0.733988702297211 0.753903762437403 +1.23249430954456 0.388727645389736 +-0.296393970958889 1.09064030367881 +-0.540357092395425 -0.973244334571064 +-0.788170226849616 -1.64432818628848 +-0.0944742588326335 
-0.936368046328425 +0.598996311426163 -0.0997252138331532 +-0.00675559975206852 -1.83425580151379 +0.924498545005918 -0.275932276621461 +-1.2932168180123 0.600026908330619 +1.3167002722621 0.914813292212784 +-1.96645679417998 -0.067542870528996 +-0.682630127295852 -0.794948709197342 +-0.51025452837348 -1.48776744026691 +-0.252483198419213 -0.147548977285624 +-0.504221674054861 -0.0837126420810819 +-0.830971983261406 -1.58405822794884 +0.214458921924233 1.06290440075099 +1.93335065431893 -0.11440647393465 +-1.8019334031269 0.586789389140904 +-0.108518383465707 0.988536140881479 +-0.989881100133061 0.740251109935343 +1.08452728576958 -0.288853840902448 +-0.4770527491346 -1.24104701261967 +0.701310622505844 -0.785819599404931 +0.875707213766873 -1.15752016287297 +1.79373591952026 -0.221989513374865 +0.954352248460054 1.46076401695609 +0.75296959746629 0.66190648637712 +0.636155346408486 -1.52012897655368 +0.925197014585137 -0.168484514579177 +1.27209129091352 -0.276470823213458 +1.87965037487447 -0.184916377067566 +0.332837811671197 1.90052188001573 +-0.311062903143466 1.31827921792865 +0.501109107397497 -1.27190152183175 +0.623796383850276 1.45393801480532 +0.817373494617641 1.60680463165045 +-1.90414165705442 0.0970499198883772 +0.339317447505891 1.47194044198841 +-0.0249521927908063 0.282234505750239 +-0.390010843984783 -1.72263861168176 +1.41731756925583 -1.11078944336623 +-0.517580762505531 -0.551634090021253 +0.154987837187946 -1.88271338026971 +-0.0905454270541668 -0.431966208852828 +-0.468168769963086 0.408769235946238 +0.306737616658211 0.595852330327034 +-1.71296369843185 0.539225436747074 +-0.796839107759297 -1.14879002701491 +-1.56317670922726 -0.811828915029764 +-0.268728219904006 -0.999720081686974 +0.448994481004775 -0.938615296036005 +-0.567586415447295 1.74906764831394 +-0.793734580278397 0.960451711900532 +-0.882175708189607 -1.3242200948298 +0.781728336587548 1.39071185328066 +-1.3803644515574 1.19413817767054 +-0.261699116788805 0.793343534693122 +-1.79685416258872 0.423516943119466 +0.392752758227289 -0.832021122798324 +0.678052538074553 -1.40096440073103 +0.12542415689677 -0.559644509106874 +0.616746717132628 -0.365370739251375 +0.774660338647664 0.126668275333941 +0.453816154040396 -0.513516636565328 +-1.19163580425084 -0.130759342573583 +0.278563930653036 -0.444839341565967 +0.209865664131939 1.58955016359687 +1.34161614719778 -0.791467852890491 +-1.04149808175862 -0.499100076034665 +0.556505551561713 -1.1561855841428 +0.149208105169237 0.644109507091343 +0.356274682097137 -0.961615364998579 +1.38724648859352 -0.198336802423 +0.896234601736069 0.659317283891141 +1.52315654512495 0.740899014286697 +0.43576249666512 0.794330444186926 +-1.01106981933117 0.938952726311982 +-1.26866808719933 -0.818602168932557 +1.11798250768334 1.61305670067668 +0.849557702429593 -1.13184234965593 +-1.40904108714312 -0.213475726544857 +1.14633041713387 -1.05692435521632 +-1.08425577078015 0.365145798772573 +1.02695600129664 -0.325761573389173 +1.59679679386318 -0.814085936173797 +-1.23649073205888 -0.367967562749982 +0.220890241675079 -0.878464560955763 +-1.06734808068722 -1.17382433917373 +-0.852742278948426 -0.392052442766726 +-0.378363746218383 -1.89135291799903 +-1.21231690701097 0.318362248130143 +1.35049880295992 0.268007177859545 +1.06291964836419 0.962191699072719 +1.82223021332175 -0.594979349523783 +0.137134587392211 0.911599446088076 +0.594827631488442 -0.388643681071699 +1.07708888500929 -0.716147340834141 +-0.406276747584343 -0.451570248231292 +-1.51391019951552 
-0.0239889184013009 +-1.87794831767678 -0.486345504410565 +-1.73895852081478 -0.0217026583850384 +1.97836993169039 -0.178526701405644 +-0.728922543115914 -1.78483736142516 +1.30049590207636 0.370156050659716 +-1.14046424813569 -1.34724021609873 +1.12852423638105 -1.02650871314108 +-1.98472322057933 -0.121367597952485 +0.765441933646798 -1.4139340268448 +1.10117118991911 -1.16767440084368 +0.94524482358247 0.525191431865096 +1.74079136550426 -0.0251013673841953 +-0.758869564160705 0.655899205245078 +1.10107791051269 0.155323226936162 +1.23302862886339 -1.51947755366564 +-0.0594671666622162 0.830685822293162 +-0.852360292337835 0.228810022585094 +0.688725728541613 -0.794826379977167 +1.05971439182758 0.210362468846142 +1.33918002713472 -1.15134960971773 +1.78231618274003 -0.3563192198053 +-1.28708548843861 -0.684198774397373 +-0.663228679448366 -1.31339746899903 +1.38450358901173 -0.366691569797695 +-0.731037746183574 1.52629584539682 +-0.425025390461087 0.877398043870926 +-0.977823570370674 1.15488017816097 +-0.320926081389189 -0.0334570184350014 +-1.58121729083359 0.192161289043725 +-0.00986555591225624 1.43039553333074 +-0.0281295906752348 0.783299225382507 +-0.583709090948105 1.74676360283047 +0.374678557738662 1.45065164938569 +0.582044757902622 -1.60773043055087 +0.216406075283885 0.502141275443137 +-0.245276777073741 -0.876435091719031 +1.39757828507572 -1.25646257027984 +-0.311926144175231 0.87857149541378 +0.519969590939581 -1.00910956691951 +-0.354108500294387 0.11264288239181 +1.93228295538574 0.134798089973629 +1.71972737554461 0.195147489197552 +-1.04625585954636 -1.30344213172793 +-0.792044771835208 1.34862129855901 +1.60052208043635 0.0818070881068707 +0.267362158745527 1.06154465302825 +-0.703058953396976 -0.641511498950422 +0.441870021633804 -0.235695442184806 +1.48828303813934 -0.699696277268231 +-0.219941417686641 -1.71355576254427 +0.485049597918987 -0.757615087553859 +0.706164319999516 0.283175245858729 +-0.642882383428514 -0.40648097731173 +1.10872698016465 0.796712150797248 +-0.195834828540683 1.83427895698696 +-1.12274315860122 -0.0412641698494554 +0.4439089987427 -1.14885572902858 +-0.512105271220207 -1.68691247422248 +0.90565584693104 1.58465478010476 +-1.07282481715083 -1.57508912682533 +-0.694312332198024 -0.498854285106063 +0.590095866471529 -1.04922886472195 +-0.498478229157627 -0.952593070454895 +-0.253517433069646 -1.54190137516707 +0.559927019290626 -0.143830716609955 +0.250342267565429 1.23514349199831 +0.228466831147671 -0.611298280768096 +0.09167171549052 1.73523396067321 +-0.527496029622853 -0.171380948275328 +1.03279994428158 -0.181355115957558 +1.48542582709342 0.283870436251163 +-0.963775586336851 0.601581461727619 +-0.750591809861362 1.07582488935441 +-1.45170483458787 -0.534700323827565 +0.293803525157273 -1.62573790643364 +1.44945732504129 0.123688877560198 +0.399974539875984 -1.05109804403037 +0.887246732600033 -1.03237203788012 +-0.288951543159783 1.24646674375981 +-0.388051544316113 1.73484283406287 +0.488534761592746 0.951098463498056 +0.480215747840703 0.881579894572496 +-0.0505791287869215 1.02275929227471 +-0.707840352319181 0.342036959715188 +-1.28837318718433 -1.50834668986499 +-0.293486379086971 -1.13973217643797 +1.00447775330395 1.61696248222142 +-1.30323924776167 -0.490860843099654 +-0.775004249066114 0.265194704756141 +1.81582325045019 0.11977204028517 +0.75037738494575 -1.57891467958689 +0.535752563737333 -1.69668704271317 +-0.393046464771032 -1.6625139862299 +-1.27257061749697 0.0962508665397763 +-0.913597752340138 0.350384428165853 
+-0.206795187667012 0.592421235516667 +0.863921501673758 0.186841374263167 +-0.88743987493217 0.892267292365432 +1.15056291315705 -0.0390995535999537 +-0.569356023333967 0.929647599346936 +1.67492989171296 0.277496787719429 +-0.649300851859152 -1.84810556098819 +0.617487613111734 1.34427976142615 +1.89021142106503 -0.0152626121416688 +-0.802346526645124 1.51188640948385 +0.986171832308173 -1.60427182540298 +-0.49435297306627 1.26738746557385 +-0.160046886652708 -1.72766077890992 +-0.756241733208299 0.219447895884514 +-0.715995153412223 -1.61858461517841 +0.672325599938631 0.0442515658214688 +-0.589872688055038 -0.4027004474774 +-0.274835209362209 1.41128568351269 +0.941726870834827 0.874502433463931 +0.408902903087437 -0.0622737584635615 +0.939647984690964 1.14028832595795 +1.72099654376507 -0.358915168792009 +-0.591329172253609 0.181961624883115 +1.03022525086999 -0.1070352839306 +1.04938613716513 -0.478937746956944 +-0.0769229922443628 -0.189620168879628 +-0.141449579969049 -1.47136288136244 +0.979796284809709 0.944936042651534 +1.0996378492564 0.00745261181145906 +-1.23192721884698 -0.245140976272523 +0.730428018607199 0.023647197522223 +1.38925843499601 1.04531263839453 +1.95676803216338 0.226830358617008 +0.781166344881058 1.364467096515 +0.079519072547555 0.178218140266836 +0.694612920284271 0.611125703901052 +0.751169465482235 -0.180619758553803 +0.807643297128379 -0.941792189143598 +0.25221558008343 1.77647676318884 +-1.07766362931579 -0.49153898190707 +-0.101968491449952 -0.725006799213588 +-1.00249230582267 0.593175368383527 +0.68739399407059 -0.475800194777548 +1.63062598090619 0.397735431790352 +0.62772857490927 -0.390101407654583 +1.29407304152846 0.643951273523271 +0.86133823543787 0.216983254998922 +0.443810791708529 1.46632099617273 +1.49802983086556 -1.13360381219536 +-1.84130192454904 0.0865068333223462 +0.222034463658929 -0.292730754241347 +-1.07310153916478 -0.249676438979805 +-0.37995773833245 0.701359766535461 +1.17907647415996 1.08756245765835 +-1.74311291985214 -0.424239085055888 +1.50414034258574 0.686829648911953 +-1.00397010799497 1.44336666259915 +-0.213060956448317 -0.820534270256758 +-0.743581579998136 -1.00002472754568 +-1.66766171902418 0.322845693677664 +0.583882632665336 -1.13463944941759 +1.35154357086867 -0.247931270860136 +1.53928128071129 0.648457440547645 +1.43196803331375 -0.202075935900211 +-0.451323233544827 -1.13327734544873 +-1.73103696573526 -0.711460446938872 +0.440257336013019 1.92988932132721 +1.59201050084084 -1.01760640833527 +-0.377760978415608 1.60415078885853 +1.83779524173588 0.456652664579451 +0.413112170062959 0.0561913289129734 +-0.413687073625624 0.484198905527592 +-0.356907265260816 -0.0855501359328628 +-0.485738530755043 0.664729009382427 +-0.820996028371155 -1.02116982731968 +-0.88024515658617 -0.521311430260539 +-0.210147111676633 -1.49618704989552 +-0.874303679913282 0.920168559066951 +0.393347389064729 -0.559738446958363 +-1.20092270430177 0.659674733877182 +-1.06756016518921 1.36190843675286 +0.0112895062193274 -0.703588688746095 +-0.438855616375804 -1.90088967699558 +0.107140043750405 -0.664715316146612 +1.30801032856107 1.40742415934801 +-1.12188957352191 -0.0480707203969359 +-0.93444059137255 1.58884655125439 +0.0410056430846453 -0.31837685406208 +-0.163024418987334 -1.57893781270832 +-1.96194102242589 0.116005199961364 +1.21107841748744 0.709162976592779 +-0.431751023046672 -0.712426878511906 +-0.918708760291338 0.396516881883144 +0.112888576462865 0.0772472331300378 +0.635841563344002 0.00458787847310305 
[... dataset file contents abridged: roughly 2,000 added lines, each of the form "+<x> <y>" with two floating-point columns (2D sample points, values within about (-2, 2)) ...]
-0.809369056485593 +-0.730974818579853 -1.45797798223794 +0.161785984411836 -0.940417403355241 +-0.0362446065992117 -0.447048583999276 +0.245054854080081 -0.954041257500648 +0.638196179643273 0.879150977358222 +-0.0251069460064173 0.513987053185701 +-1.30470841936767 0.0761530790477991 +-1.1917147487402 0.517909730784595 +1.15008150972426 -0.854444329626858 +-1.17579466570169 -0.305731507018209 +1.01692835614085 -1.04690181557089 +0.0501257497817278 0.0464099515229464 +-1.01087663881481 -1.02637975756079 +-0.404583084397018 -0.893223070539534 +0.0559048941358924 -1.21035133115947 +-0.42090770509094 1.7834610035643 +-1.59118476044387 -1.05318497400731 +0.829802964814007 -1.69237840734422 +0.356697210110724 -1.63503530249 +1.63563888147473 -0.949815786443651 +-0.33548969309777 1.57890895940363 +-0.306581119075418 -0.206751497462392 +-1.36674803774804 0.582382326945662 +-1.52913266420364 -0.924544407986104 +-0.443889328278601 -1.35246144514531 +0.417659766040742 1.39030277170241 +0.712600598111749 1.09385246876627 +-0.59676107391715 -0.50933497492224 +0.164284623228014 -0.306459390558302 +1.21458974294364 -1.41466171573848 +1.66936383582652 -1.01770622096956 +-0.59725993219763 -1.59722836408764 +-0.29087426699698 1.81666373554617 +1.09779544733465 -1.29166564904153 +0.516360103152692 -0.421763032674789 +0.855776055715978 -1.45628353673965 +1.07508763298392 1.35065372474492 +0.246559586375952 -0.577343482524157 +0.203784381039441 1.4795304434374 +-0.155233951285481 -1.36775204539299 +-1.00837675947696 0.93706488981843 +0.0772836124524474 0.971738868393004 +-0.829441552981734 -0.756649824790657 +1.08787356782705 -1.11807894613594 +0.582525528036058 0.179856671951711 +-1.50838598888367 1.12530351057649 +-0.294651906006038 -0.782255541533232 +-1.15348944533616 1.04907440207899 +1.25058274250478 1.37957459036261 +0.165137558244169 1.47722280956805 +-0.317757084034383 -1.6735241021961 +0.159999747760594 -0.663217906840146 +-1.15338715258986 -0.535817659460008 +-0.178906688466668 -0.166212199255824 +0.381188116036355 -0.327305997721851 +0.0221287095919251 1.44373411126435 +-0.182388367131352 -1.25078183226287 +0.103421477600932 -0.231758296489716 +1.6765955593437 -0.329154009930789 +0.108329905197024 1.33887871168554 +-1.9925231821835 0.0213536852970719 +-0.108932117931545 -1.35737143922597 +0.578591015189886 1.5829489286989 +0.593595879152417 0.274068120867014 +-1.23690119758248 -0.771171107888222 +0.0330616785213351 -0.234989884309471 +0.520214329473674 0.810812762007117 +-0.0925285900011659 -0.789919492788613 +1.30162520892918 0.255201406776905 +0.0785638615489006 0.727097000926733 +0.281766322441399 -0.735557875595987 +-0.0508041242137551 1.20796711463481 +1.62734560761601 0.122758312150836 +1.16054895427078 -1.32981141563505 +0.106075030751526 1.19251193664968 +0.689048919826746 1.10176935326308 +-1.30426577292383 1.29473721049726 +-0.542432297021151 -1.35681091807783 +-0.632117763161659 -1.68240509461612 +0.836055185645819 -1.49830533377826 +-1.17219204921275 0.192874475382268 +-0.862362221814692 0.590387370437384 +-0.61212769523263 -0.754163251258433 +0.532454659231007 -1.79826826322824 +-1.23814881872386 -0.394996179267764 +0.741987370885909 0.538800988346338 +0.455283262766898 0.113339141942561 +0.762146341614425 0.0985372392460704 +0.694346585310996 -1.18940708693117 +-0.556768977083266 -0.19841328356415 +-0.328740163706243 -0.453239479102194 +0.394131612963974 0.130489866249263 +-0.190344872884452 -0.0192092694342136 +-0.43504345882684 -0.242069719359279 +0.836925474926829 
0.452082131057978 +-0.222023765556514 1.93275196757168 +-0.151929195970297 -0.428292389027774 +0.3924580514431 1.74833800364286 +-0.0746122561395168 -1.58671064395458 +-1.9946396574378 0.0130880251526833 +1.19051989261061 -0.344716581515968 +1.04757412243634 -0.85289242118597 +0.346477000974119 -1.29980584792793 +-0.398665097542107 -1.55346148461103 +0.861384804360569 -1.58875999134034 +1.05565949063748 0.929882677271962 +1.49782165605575 -1.30785795487463 +-1.05717379786074 -0.44439038913697 +-0.00545010343194008 1.88809101190418 +-0.293355907313526 -1.57001116033643 +0.934476302936673 1.73850458860397 +-0.760175189934671 0.619933987036347 +-0.116550916805863 0.31672424543649 +0.0689476309344172 1.06760337110609 +-1.12612480297685 1.5545618114993 +1.73640192300081 -0.383532059378922 +-0.154693421907723 -1.83795704878867 +0.00561558548361063 -0.260093169286847 +0.351836933754385 1.39957998972386 +-0.240056734532118 1.44003069680184 +-0.274040377698839 -1.06224725581706 +1.04637711681426 1.35517049673945 +0.796862880699337 -0.926081391982734 +0.77948493976146 1.1046355208382 +-0.385508052073419 -0.577272996306419 +0.885531142354012 0.374623492360115 +-0.738488861359656 1.59851285256445 +-0.294896636158228 1.50530454609543 +-0.118352700024843 0.751791656017303 +1.83458659891039 0.763392519205809 +0.276848142966628 -0.627989591099322 +0.198204305954278 1.55415993183851 +-0.965346896089613 1.74663467146456 +1.24042096175253 0.360297702252865 +-1.08026906754822 0.0281726224347949 +-0.392678933218122 0.486390750855207 +-0.0520683592185378 -0.0122058549895883 +-0.70393608789891 -0.244592070579529 +0.432312408462167 -1.63205261901021 +-0.285521300509572 0.540345161221921 +-0.44073950778693 -1.94070954900235 +1.53675019554794 0.171777036972344 +-0.516839077696204 0.694707987830043 +-0.0543823316693306 -1.65257409587502 +1.17925004009157 -0.412131839431822 +0.717728287912905 1.42584210447967 +-0.553022363223135 1.28118508961052 +1.43640910368413 0.574120986275375 +-0.379532656632364 -0.85127760283649 +-0.978875409811735 0.113057801499963 +1.4644192783162 1.02404158003628 +-1.31216923426837 -1.24314045812935 +-0.0639631468802691 0.603732258081436 +-0.309344563633204 -0.789931298233569 +0.48447347432375 -1.17820374760777 +-1.00474072620273 0.138587209396064 +0.597883236594498 0.177071297541261 +-0.35951466485858 -0.559077773243189 +-1.40784593392164 -0.206193679943681 +0.628039943054318 1.88317033555359 +0.460768551565707 0.279600772075355 +-0.645910832099617 1.58562660962343 +-0.171903840266168 0.841492540203035 +-1.41926888469607 0.120397062972188 +0.120376911945641 -0.443307884037495 +-1.33270786888897 -1.30976195726544 +1.52910458855331 -0.195199891924858 +-1.25487603060901 -0.955086532048881 +-0.0635397098958492 0.0426249783486128 +-1.69824744481593 1.02783168852329 +-1.21491647511721 1.10925155784935 +-1.82084869127721 -0.243309675715864 +-0.497696489095688 -0.0969118755310774 +0.434841864742339 0.732801315374672 +0.984661881811917 0.628354391083121 +0.50211448315531 1.19017517846078 +0.393021203577518 -1.50612392183393 +0.643143159337342 -0.286876948550344 +1.2475446164608 -1.30731332954019 +1.7807036517188 -0.559430585242808 +-0.780744485557079 -0.320345582440495 +1.39007860887796 0.0669972086325288 +-1.44604905042797 0.755969520658255 +1.66181718278676 -0.946355957537889 +0.282613515853882 0.474915129132569 +-1.76856970135123 -0.103618717752397 +0.155654016882181 -1.62901589181274 +0.817213643342257 1.13437230885029 +0.189842213876545 -0.818608742207289 +-0.241830915212631 -0.4185800999403 
+-0.689355921000242 1.67017219122499 +1.7081085415557 0.358107012696564 +0.26203790307045 0.0362963741645217 +1.74448285158724 0.556258151307702 +0.240780309773982 -0.249960512854159 +1.5712562026456 -0.501207681372762 +-1.47499735280871 0.88674933463335 +-0.501748934388161 -0.620949627831578 +0.822148379869759 0.188567340373993 +0.15395416226238 0.0337743703275919 +-0.52516320720315 -1.9218394048512 +-1.80592098087072 0.492886534892023 +1.59698864538223 -0.386823789216578 +-0.80177101586014 0.794467891566455 +1.57968135364354 -0.560811094008386 +0.978329004719853 -0.787602736614645 +-0.912789452821016 1.00366698019207 +0.884372201748192 -0.96411703992635 +0.634919858537614 0.635765318758786 +-0.952819909900427 1.32470160536468 +1.56201795302331 -0.956944534555078 +1.74842861108482 0.0699937213212252 +0.566707109101117 -0.193644423037767 +1.02064577303827 1.08190088905394 +-1.33723836857826 0.446267042309046 +0.191959393210709 -1.40859754104167 +1.77578135486692 -0.457586772739887 +0.48813231755048 -1.56270545441657 +-1.41894588060677 -0.180320700630546 +1.0090023111552 -0.328791121020913 +-1.68189627490938 -0.594042068347335 +1.00731677841395 -1.10872753802687 +0.385563982650638 1.84786182735115 +0.46696568466723 -0.455331910401583 +-0.665068889968097 0.356519031338394 +-0.0595175297930837 -1.9761496912688 +-1.14409968443215 -0.332036604173481 +-1.48075337801129 -1.15356951579452 +-0.607344607822597 -0.484723097644746 +0.672143515199423 -0.384848373010755 +-1.26006450969726 -0.285009535960853 +0.881920601241291 0.251503366976976 +0.648684580810368 -0.81356421019882 +-0.749023489654064 0.528532957658172 +0.476838054135442 -0.242659313604236 +-0.0239269947633147 1.84732797648758 +1.42098575364798 0.377978096716106 +0.538022367283702 -0.268792019225657 +1.86448822263628 0.110014945268631 +-0.58037050627172 -0.707389807328582 +-0.219043103978038 -0.746720960363746 +-1.40147164277732 -0.677051988430321 +0.81263914424926 0.996741810813546 +-1.5363792553544 -1.04574332386255 +1.55895188450813 -0.418514842167497 +0.931446042843163 0.336160787381232 +0.322011385113001 -0.149178232997656 +-1.14928907621652 1.4782686708495 +1.55051083024591 -0.948065057396889 +1.97485943138599 0.107909103855491 +-1.19166759400166 2.03518285461239 +-1.62924688773698 1.12023798794023 +-1.0611915136548 1.43381805902816 +-1.11889065428174 1.38904764967508 +-1.16487038506663 1.37075075366072 +-1.21386756280906 1.70279495602942 +-2.59205360662661 1.65866690081081 +-1.95068436393983 2.10405848639554 +-2.04233706461734 2.69935005110226 +-0.770742088542349 1.80117554542146 +-1.9701433521556 0.939237546627439 +-0.912189898998505 1.83577940285674 +-2.58101412699095 2.03210811874366 +-1.88825945601336 2.55615367273337 +-1.74618136695123 2.76563740520275 +-0.899753502556152 1.6594069152931 +-1.40521427102855 1.36383235685996 +-1.04314481372482 1.24033821326218 +-2.14240012828729 1.08288475729994 +-1.38437164620034 1.55662149375773 +-0.850298542049742 1.44856909940488 +-1.03841847645587 1.77898091029844 +-1.12415278142787 1.0930896508368 +-1.48514996467627 2.03420163317121 +-1.69600676091856 2.2516203676211 +-1.81134080047256 2.02142473960764 +-1.43476751771293 1.07956740305296 +-1.85344790945298 2.0144348479668 +-1.7368111046823 0.821041857809788 +-0.994567952570534 1.2759450320015 +-1.53974354405425 2.6866367901536 +-2.18061248537622 1.29447355539075 +-2.1929702092754 2.03666146936364 +-1.57463265814356 1.68212418908722 +-1.54607386373452 1.40521256577692 +-1.35628809731922 1.57427176366097 +-2.03943404973022 1.42275557619057 
+-1.36272256307772 1.40664096268362 +-1.01021159671716 2.15651999831668 +-2.17514015919066 1.78532139955557 +-1.20313911278269 2.4501990312571 +-2.55325556545056 1.95865291029683 +-0.874133176156348 1.47952371787748 +-1.95128421530596 2.71799123099542 +-1.47170731656722 1.99347874567381 +-1.79465832513294 0.914845121544815 +-1.80998723254076 2.28132948697082 +-1.40527645590252 1.55203179641164 +-2.53265631383753 2.07960277934096 +-2.37756757436789 2.23777506731919 +-2.54953125769741 1.35819511859215 +-2.30431373276285 2.42464346856347 +-1.32859053368217 2.55367811488456 +-1.47892023528463 2.03218364667616 +-2.48267647250093 1.51077896262056 +-1.00033944098747 2.13395558668769 +-1.27630399724937 0.956750537254876 +-1.57398284597791 2.34670890447221 +-2.45108784128449 2.23178234182767 +-1.23110370290003 2.45753892742566 +-2.11628849057949 1.41660601970277 +-2.29473834390289 1.33623908086217 +-1.79050427813551 1.55824354429266 +-2.0275130839879 1.05980519948996 +-1.80216226026884 2.39492832262538 +-1.13028041691057 1.38822295468769 +-1.23359633561692 2.58554913707546 +-1.78682026684753 1.25562372057236 +-1.83440723268784 2.3933745459514 +-2.10529728744648 1.18887815623081 +-2.62544822086117 2.17733929817102 +-1.73055768058173 0.779762963750428 +-1.92840985067657 1.58325735757596 +-1.65491230598188 1.85140080096669 +-1.24405157564676 1.2215249589505 +-1.99882246226541 2.06222033685542 +-2.12700846370778 1.33522108661479 +-1.22087018842346 1.43858185967899 +-2.66361454267523 1.65774209330327 +-1.99463732772044 2.28386535726956 +-2.08511877850167 1.18398063442207 +-1.17263599623463 2.3324235337647 +-1.74672923095993 0.973433778141654 +-1.89814841233662 1.71941511797256 +-1.28243707631252 1.43860276926777 +-2.58162748486272 1.96828393171079 +-2.21365857216067 1.38841047462544 +-2.54430622326679 1.54339400863743 +-2.44902191170029 1.88444778787441 +-2.200922690318 1.12049048113174 +-1.60814429771624 1.72824961737267 +-1.75492319928951 2.46457960874311 +-2.20571912171788 1.80319014655895 +-0.875514161293424 1.93742652189455 +-1.60288089372686 1.38024536053187 +-1.96067796506456 1.32620979518167 +-2.35897769917271 1.47078241220213 +-1.88591681419409 1.11959487012125 +-1.48855962137229 0.834660524022794 +-0.792933227014757 1.76319552910051 +-1.48469570044897 2.62431030998475 +-2.41045733027629 1.30777779452971 +-1.82054761423267 2.36174192371315 +-1.81438926838807 1.47477288216821 +-2.09699616486675 2.42900866454936 +-1.4730270854058 1.58988298474348 +-0.867546089910931 1.55000719054139 +-1.25821902368686 1.21694654411174 +-1.06336186126343 1.65915775344244 +-1.67771772247276 2.32320523353762 +-1.32715533812068 1.67826557716823 +-2.51541410012639 2.06872576566882 +-1.12216207709632 2.37513398284561 +-1.29558614919431 2.20746803002901 +-1.20154741917274 1.04701113978869 +-1.8361042514304 1.91614048740364 +-2.59794556860349 2.08298629567496 +-2.73372715291104 1.74417224944106 +-2.28472533308438 2.22646006102211 +-1.16955454646147 1.75181275045968 +-2.44730032979048 1.35878223412207 +-1.23379324328623 1.77436720159373 +-2.5438143140497 1.4173959754268 +-2.49425101418517 1.24258283198348 +-0.977752680876321 1.50492971575356 +-1.54640233654997 1.12666190041698 +-2.1309082517261 1.04901470521933 +-1.77741091603882 1.01209564039952 +-1.57876812246165 2.61696089150853 +-1.1782548576454 1.21348930521405 +-1.83174975913487 2.65439325243703 +-2.6619502692344 1.63840270739517 +-2.59361323712862 2.28141507830939 +-2.18166760540105 1.08505793612919 +-1.89539417995608 2.00402424389965 +-2.20240815261847 
1.22945073664657 +-1.83056651240683 2.71710144727028 +-1.81013125459201 1.69902300181336 +-1.24494680760897 1.07195019208363 +-1.21703546399719 1.64926553097091 +-2.36251944395251 2.55660830809272 +-1.23749539068437 1.42882067615635 +-1.76962709332354 1.69409035703636 +-2.20839878309987 1.95820826104379 +-1.45148535723335 1.41447394432969 +-2.43029403219468 1.54687295433602 +-2.56270962439946 2.16373720344625 +-1.15098296523563 2.17882653324402 +-2.01884537514768 1.63806295533202 +-2.52736594200603 2.20257224886215 +-2.40011429971553 1.06612304969228 +-1.39857354060284 0.886925617218978 +-1.96638499618045 0.822644602493412 +-1.91129151637203 1.96975674618504 +-1.99354833968452 1.45419068865947 +-1.69931606574453 0.793259164770163 +-2.48895926073662 2.4368151244568 +-2.02110185472763 1.13920148879758 +-1.03057487959511 2.09174012717328 +-1.51746582098774 1.89388339220083 +-1.49812578641347 2.13548706843055 +-2.56769695271275 1.47039415685929 +-1.62189081542662 1.60272958780176 +-0.98380476141817 1.2680092011439 +-0.910782340434409 1.40511447422675 +-1.72681691542736 1.26025277793876 +-0.997412938610679 2.02992958806924 +-1.32005186936355 1.57968101788781 +-2.19878386425323 2.1820569564449 +-2.5975508978451 1.50241198389328 +-2.37465727001897 2.06890058748893 +-2.32345457932449 1.0692403982254 +-1.35090943797029 1.43926060198343 +-1.22359934447802 2.25575250572062 +-1.48314291749618 1.16748524316124 +-2.22354501996658 1.72935829356781 +-1.84371076429731 2.49908447124533 +-1.54100365609399 1.77673479354478 +-2.10604495134673 1.10521982329435 +-1.72405296562708 2.41095234312586 +-1.55322265530474 1.07342903919987 +-0.997433097553468 1.64237723032481 +-1.81501867481045 1.1577365826676 +-1.03922242437027 1.94399415607101 +-2.40125864789358 1.0322285033623 +-1.20706320506058 1.63525838869161 +-1.08797422422565 1.05589043207935 +-2.26201687425844 1.75067920003555 +-2.29337019658185 0.922859147642768 +-1.09787730778105 1.72299859717957 +-2.40108791225127 1.19511489615313 +-1.12306604952089 2.26709287332839 +-1.16697555266789 2.15995026192165 +-2.37625527799598 1.77842149043924 +-2.5203160596872 1.55154944722078 +-1.39335660821295 1.49071798239372 +-0.848666631602711 1.57333894119135 +-1.90208534662149 1.77561559014491 +-2.41579394041098 2.47230404939971 +-1.86190373914785 2.00591002403296 +-1.54835704619533 1.95229211308531 +-1.18971544316507 2.40861362636558 +-1.81789532721511 2.01521166226364 +-0.939525657785154 1.85020926313943 +-2.67065597137905 2.11304215293846 +-2.21728697510905 1.18935574170671 +-1.18320864437617 1.31210427124522 +-2.64771238433666 1.78018756861336 +-1.79188505884728 1.9044718239317 +-1.03741833659357 1.83363874394915 +-1.98403837157882 1.41458995732016 +-1.65633188279814 2.70780996744058 +-1.40469650503717 2.44736862693451 +-1.21122448144949 1.92198327135763 +-1.87666094488002 1.0479246023903 +-0.934562865165091 1.59256841660014 +-1.74453232016883 2.18024900367952 +-1.56316236358604 1.65862845907456 +-0.965223541003681 1.18955773439727 +-1.97820017112888 0.790948266615189 +-1.43824088572062 2.62965051921553 +-2.4641840648698 1.43594895208723 +-1.78746105729139 1.06046480684078 +-2.05777969629041 2.36377891803629 +-1.71069052441514 2.05676378448031 +-1.03567552844531 2.22282330085061 +-1.49421687096826 2.57753286695576 +-2.10356439089871 1.50278743774167 +-1.36330928745217 2.5144149609702 +-1.73061054058842 2.72078938873953 +-1.57463980511299 1.01552860297731 +-1.07163745163477 2.05075418202675 +-2.21751271838791 1.65609969569839 +-2.3578949780064 2.19608947819567 
+-2.09669907575942 1.94117601354144 +-1.20256128440148 1.92053733299024 +-0.919491512686974 1.65091221685059 +-0.993743726969725 1.66961057994983 +-2.35258941350973 2.47407818165145 +-1.94380576441533 1.09510601031131 +-2.17337201394609 2.67905815503097 +-1.77726011116526 1.05693938814572 +-1.38989822704814 2.14002279097683 +-1.8662602123665 1.10764912607617 +-0.841827872660971 1.68981445253364 +-1.83687374460048 0.990998357520467 +-2.27821585068948 2.0565880425135 +-2.25683995707429 1.0791333103413 +-2.21722627243496 1.71481709693096 +-2.03486401074103 1.00881839029959 +-1.41103755401916 1.52227739708967 +-1.80214318352289 1.65001077948607 +-1.53202530460901 2.72444082562349 +-2.21927878352351 0.984921982963598 +-1.17830823127649 0.961745855850196 +-1.46721947890244 1.90960681483618 +-0.916427475851989 1.92051976265854 +-1.3730903616967 0.856497839544929 +-1.3084312039288 2.11403986577413 +-0.880801729843981 1.89795272842518 +-1.44713051895148 1.59576999580956 +-1.35279018057979 2.15921830058566 +-1.42488990350164 0.881587003809429 +-1.25148383759431 2.33617951905823 +-2.08745286752007 2.24122254549063 +-1.22771459211133 1.75871379113293 +-0.976684891604847 1.39168805581055 +-2.46777529733724 2.21174201181627 +-2.09462140545718 0.908810374313063 +-2.53914094109557 1.36520306624941 +-1.26132425757594 1.62193274124063 +-1.76538003234661 0.883356131133652 +-2.14535014516211 1.01457065901256 +-1.2025370127382 2.54387862466744 +-1.88582301138437 1.3676535216952 +-2.48515936963729 1.65607937377415 +-1.48013627388052 0.971084019620932 +-1.27576622924305 2.52400848268069 +-2.66380359836391 2.04378456015608 +-0.873973647118575 2.00880722700155 +-2.61206244629345 1.58169995759151 +-1.04677310910544 2.37915060947186 +-2.4005571743334 2.50596579814798 +-2.24076203115753 1.6207564072544 +-2.26197382154412 2.23188852051742 +-1.89163734436504 0.890258821644849 +-1.38403230624742 1.79848478779665 +-2.57945910653568 1.55346184688157 +-0.97539784757889 1.64075045984707 +-1.35140367223418 1.08354829436637 +-1.73503289137504 0.892647104789532 +-1.90002814865208 2.42242630755997 +-0.780537592698849 1.72939293690494 +-1.20920232262931 2.00190380167685 +-1.62368235735691 1.42154423844538 +-1.22984012168369 1.52452587241775 +-2.40890884211755 1.86386096662379 +-2.18179306572772 1.04626948897264 +-2.42723879775501 2.40911366975403 +-2.55431563359222 2.25670473834968 +-0.86568408504597 1.94901816405824 +-1.18373148886955 1.98902614972091 +-1.34065223670102 2.35713109093255 +-1.53104912488259 1.01049957469574 +-2.29101496141872 2.06798528971619 +-1.46410238927699 2.56393608589194 +-1.9463897910761 0.806216265526629 +-1.2401649295213 1.39229207950181 +-1.21718244020364 2.15919404708318 +-2.48409021039031 2.23599024394996 +-1.87476152925289 1.0525727383653 +-1.96953518221623 2.47696180919654 +-1.80343762830368 2.18407173220537 +-1.76272171548105 2.63059280904478 +-2.17759188635743 1.41433430880777 +-1.01238321837506 1.90073421828917 +-2.19669676244727 1.86405327542222 +-1.55228658112235 1.71341816334522 +-1.85320127288348 2.0708837904828 +-1.91534569671846 1.90892031508035 +-2.2255735462392 1.67646339627451 +-0.922918687094963 1.93393976891882 +-1.37497156950182 1.31371633707083 +-2.5605723042833 2.2718468396785 +-2.48267896937675 2.24520805150739 +-1.73838358464188 2.58862794235817 +-1.71507382810584 1.98369362529836 +-2.21786243886224 2.45035795214123 +-2.50352654977239 1.50450739728755 +-1.38890046718068 2.4275368172536 +-1.74736715153328 2.2891439767415 +-1.59432074565909 1.7765840766812 +-1.4938112259703 
1.80258287813983 +-1.91313565195075 1.86361354867465 +-1.72916978456548 1.36703503968663 +-2.43671092110223 2.29087452449373 +-1.84644425286449 1.61973378805912 +-0.918369373758352 1.80318453114948 +-1.96557398045904 1.69161523784048 +-1.20803046690769 1.56912135424561 +-2.07314084981105 1.3709603939066 +-2.26028135464064 2.62002261028162 +-2.34418540493122 1.30723500343508 +-2.20790091528094 1.84674339320979 +-1.67272947494409 1.92142191015652 +-2.24230296768717 2.59111194404311 +-2.29599373134322 1.83694311276547 +-2.01345372710846 2.29801510319582 +-2.03077936729885 2.5875808531279 +-1.5920262005972 2.34908270322255 +-1.98797604440249 1.60782002842522 +-1.36776173672176 2.51128324212975 +-1.6307862703549 1.44308089696339 +-2.42484606009445 1.995383551796 +-1.39060005870185 2.48820042841605 +-2.57986831989608 2.24490622243381 +-1.88472906778104 2.64600676436446 +-2.60895329293332 2.08870455947242 +-2.29156397607646 1.29261197753719 +-2.25881522752813 1.68510908212981 +-2.53269189222253 2.31935529735408 +-1.25926875973589 0.98501546260438 +-1.58349218218125 2.01291449040643 +-1.51250968033916 1.6527306171926 +-1.37599126527554 2.57623490177563 +-1.95026136421136 0.856398098622478 +-0.986752137993491 1.17861721011347 +-1.42948826494164 2.60245989401362 +-1.63994252820036 0.899384984266973 +-1.83835192954636 2.11473747993357 +-1.34342075556985 2.49980798734821 +-1.49157879081092 1.12597731173507 +-1.20123671692333 2.37506143887065 +-1.03926155481956 1.51878426734827 +-0.951014179256773 1.9843508796627 +-1.79086232742763 2.34899463922254 +-2.45821722362659 1.2434138031537 +-1.50792365612931 1.95985062279276 +-2.09466984726957 1.90640356576539 +-2.00737927666403 0.897821431499935 +-1.7642239387939 0.917935313177324 +-1.51634423479906 0.844624096979416 +-1.20121678429193 1.11525823127947 +-1.42294188168368 1.34385122264273 +-1.81287782311908 1.79099293657116 +-1.18509183869234 1.91125411259479 +-1.87871272255204 2.45255864596463 +-1.96493920123673 1.30534282744399 +-1.15305151137061 1.34563791377938 +-1.80413446899972 1.53918157516516 +-1.20333189236469 1.62576587403662 +-2.46696216518528 1.60118205657697 +-2.30961867094508 2.29508235909513 +-2.39824582914359 2.2750555160316 +-1.3064146074327 0.933106421944714 +-1.47693019126079 2.65952746894679 +-1.79425820753447 1.15078595605216 +-2.49873909632213 1.62216446361042 +-1.39032590108461 1.9834412829811 +-1.78358856633774 1.94250072770289 +-2.03378301578111 2.17295679960868 +-1.16451884991027 0.987719556768454 +-1.17344779147304 2.08665531965441 +-2.43722015757582 1.81215945397041 +-0.803589937728858 1.95742542434952 +-1.45858420756183 2.35571068756751 +-0.915222370517469 1.36230543673507 +-1.13993466876916 1.73066217963121 +-0.916801125262893 2.26493491463832 +-0.934133980874932 1.80083107434682 +-1.77611844705886 0.967866508276231 +-2.1954318284849 0.919567816876835 +-1.88333789211235 2.39910360238052 +-1.78313419035173 2.10865839941896 +-2.1658484623628 2.26056094745643 +-2.3850623555342 1.87700923856816 +-1.91910108363724 2.63901918799154 +-1.28119170757479 1.77851391209758 +-2.46687856734268 1.74960225400052 +-1.05354965057693 1.59385008037559 +-2.16036286529622 0.987047544666595 +-2.09486795448235 0.992892998256749 +-2.01764522584624 2.15157435972712 +-1.17012630634478 1.11613914089746 +-1.34016390935055 1.68375927115328 +-2.30146255063362 2.34681912510193 +-1.98421674592889 2.37979672475255 +-2.61671161044858 1.40462889707184 +-2.69883498222104 1.59026206842161 +-2.0485437516915 1.43206162516496 +-1.59167871390007 2.48501328750051 
+-2.28939871255777 1.96501034636519 +-0.960925750420458 2.27479414873294 +-1.39191143818668 2.22515889488629 +-1.34671726635194 1.00509810539431 +-2.07738948518879 2.34453338709197 +-1.51429312209136 1.3757714037644 +-2.29772256546191 1.38337137409023 +-2.30699090052865 1.99158532962672 +-1.00423157772518 2.03449934114239 +-1.4026854736591 2.66773513051218 +-1.23717779525093 2.5961756999465 +-1.98896160906649 2.16399304520807 +-1.59348430064044 0.788328454815811 +-1.78009903289846 1.25635132052473 +-0.997726169866539 1.83566660851709 +-1.48461776411629 2.13627312843225 +-2.16518735371999 2.64726757023952 +-2.36741443815178 1.12302512767262 +-2.04591767535082 1.34056163903794 +-1.87323970765344 2.23436296869538 +-2.28506458549298 1.07481113447345 +-1.28956728892869 2.2921586586023 +-0.800310299252189 1.91301441750026 +-1.91184279274664 2.37778916813902 +-2.18776946746281 1.0739179346571 +-1.35226939803458 1.87995107843078 +-1.40435375039003 2.34652877271644 +-2.71592314539946 1.48063131839505 +-2.16589130040727 2.22149034262172 +-1.99718514269686 2.01731428968257 +-1.78687398180834 2.62864962824158 +-2.64922716886274 1.81522842960901 +-2.28713578356898 2.14915429100863 +-1.28258923998571 1.43054128576539 +-2.28573849330193 2.15153556548528 +-2.25576702084861 2.51344583042435 +-1.85006100869275 1.48781097492672 +-2.57214767437897 2.26288142193577 +-1.24353512468285 1.87105808312542 +-1.04256127540491 1.58837098533756 +-2.02434073387182 1.49226297250486 +-2.18143625881991 1.39518492388076 +-1.95064965028784 1.86993219329513 +-1.75165139416701 2.13476962268821 +-0.995852124397672 1.54753071673683 +-1.63662130104846 1.73649274416736 +-2.14211634917653 1.05265616531021 +-2.30219276527411 1.47760393795095 +-1.49314193612433 2.38010861909486 +-2.01689080659769 1.1531629003445 +-2.35315670444167 2.20304072600327 +-2.32147837149448 1.61370933194182 +-1.73342657134405 2.45110113451726 +-2.28198589429728 1.71426550204403 +-1.59009776775262 2.53747561991683 +-1.49225419385827 1.51945130572192 +-1.25181855585895 1.05108130256183 +-1.37982841579712 0.923487232984072 +-0.833544759065873 1.98180188097528 +-2.30568154387645 1.32168749019972 +-2.28204665238506 1.19122921953446 +-2.090291033485 2.61571080888159 +-0.944126566392398 1.87283497022933 +-1.14712131953335 1.52787560223139 +-2.0437368098507 1.69660945049024 +-1.43901960410646 2.47246731350667 +-0.951100760125584 1.89607267592571 +-1.5299227670627 1.80136344720147 +-1.82280354153833 2.07519403843678 +-1.43332734004132 2.40256244082472 +-0.808317691073782 1.65790274970702 +-2.68245245654605 1.87412361659578 +-1.08180879334456 1.32489426199815 +-1.46876813516489 2.46150421722329 +-0.914077093810803 1.99693859693131 +-1.42782214841343 1.12098052304707 +-1.91569607982881 1.75722256860233 +-1.33926112808756 0.877415154115326 +-2.03816841075665 1.87657374701 +-1.9171236036206 1.62043526953555 +-2.20159483699597 2.64607405195481 +-2.18872656718365 1.76600426899738 +-2.25110352969266 1.44944278987593 +-1.55588801902256 1.1043935869681 +-0.862797421389288 1.63485720752276 +-2.28329470612577 1.11379553143404 +-1.67254574269525 2.34954129577152 +-1.37680535994939 2.0889830360981 +-2.03516101183838 0.938902536858833 +-2.06253960634119 2.51770219205416 +-2.27864107767589 1.3764513879875 +-1.07484733731023 2.46823339162863 +-1.54841257583818 1.96782859506554 +-1.57738551728896 2.34252446050293 +-1.55528460488192 1.33163354823834 +-1.36888064675502 2.51980278360195 +-2.70981968285984 1.82673987092919 +-2.6539201433631 1.88959334929011 +-2.05531940374992 
1.95710617162756 +-1.44504649935759 1.2375392587716 +-1.55128307685725 0.914436041829205 +-1.33429612962998 1.20868271401621 +-1.64613771902866 1.13116380105264 +-2.74610508325047 1.82617928505412 +-1.95300543399996 2.02952976570002 +-1.69807968799493 2.16634562330789 +-1.09076592650733 2.41518943755425 +-2.36365286166317 1.8616429045657 +-2.64318324065304 1.40693642430476 +-1.2181373336213 2.5858705793368 +-1.71047505781523 1.87213956656403 +-2.31868141236252 2.29533063153274 +-2.39242510589308 1.28786799869037 +-2.27909677660561 1.45528098240963 +-1.67104674082718 1.23303059719971 +-1.85755733911417 2.26498867337129 +-2.47654932270295 1.36580039093739 +-1.73139282268009 1.56775317619092 +-2.02024190545551 1.78586996950678 +-2.39540742895103 2.17534142265565 +-2.25215358146004 1.04304080455101 +-1.13047276529021 1.97110393409154 +-0.966097292942024 1.96137971383057 +-2.2638832791569 2.33545528723396 +-1.05436059272385 1.27285773026294 +-2.41098427677996 1.6311383140134 +-1.35130497765265 1.07544869835026 +-1.46397202041469 2.3640861487808 +-1.50965064250401 2.24958366014532 +-1.76530294677711 0.770050006439007 +-2.35336924016944 1.46188327249608 +-1.90353892229966 2.51043818517125 +-2.03269065775446 2.36823684115431 +-1.39353355344853 1.91917385112963 +-2.07966913514308 2.69821909720547 +-1.41811607078186 1.02488258805594 +-2.26521889864004 1.25093244713268 +-2.24383556934542 2.26841231899805 +-2.28691512438006 1.1343289473886 +-1.72596124791077 2.58997196004263 +-1.50255491350315 2.28644152543045 +-1.48779484628237 2.22996508934072 +-1.16980293624572 1.53001578057653 +-1.95437759252734 1.16194218023217 +-1.85233780926562 0.869147833888448 +-1.27077775214336 2.00081671679861 +-2.37512846010274 1.66605737338311 +-1.18451283265374 2.46224029547072 +-1.19388463207058 1.66119739132471 +-1.09705987291879 1.02862716464794 +-1.16128206484488 1.09818287425212 +-2.34688631769738 1.30978012921757 +-2.35926646272189 2.15964755135126 +-2.21732054509691 1.9162894794578 +-2.29255456243179 1.73565295989162 +-0.802065442767954 1.72913033700085 +-1.39329557957596 2.01354709964714 +-1.86064493002839 1.84552873250566 +-1.36367908444724 1.41019183163068 +-2.08827063719279 2.69530903174079 +-2.52259370031304 2.37414543189577 +-1.82957305990628 1.17301149478651 +-2.13811979767404 1.66739998803011 +-1.82270920927174 2.16902470261327 +-2.1375275039906 2.57005352898038 +-2.48188079426534 1.61663131507582 +-1.81805616662375 2.33154498504943 +-1.49538474295757 1.26829844851515 +-1.89529685581572 2.3431101296889 +-1.65306675665549 1.05735867981336 +-1.65917981228328 2.55938762797511 +-1.02334828952796 1.86998878185175 +-1.10170995256401 1.84351222867615 +-1.94521452298856 2.36574036859444 +-2.35854739042468 2.26996419577397 +-2.42511982329658 2.39156863883606 +-2.07422761990226 1.75515391396827 +-1.27596436905212 0.951466179904557 +-1.57233872868589 2.03488373009957 +-2.53754696574531 2.1590919825109 +-1.90478678605056 2.3492863681028 +-1.19304771626837 2.56090303648732 +-1.86033128712795 2.43055748006798 +-2.26501153314955 1.4685598374205 +-2.10570554294161 1.94760321172423 +-1.92809801593891 1.68152541945226 +-2.29168487245686 0.97979415858633 +-1.91742519846655 1.70699755287266 +-1.84462667674295 2.72026074257217 +-1.61373237751893 0.832265594494558 +-1.38897058040372 1.35067523176319 +-1.98368292905859 1.89210972002245 +-0.903501677778578 1.78109448541424 +-1.68689751856498 1.26639894767202 +-1.18860749572984 2.52583131611816 +-1.81434679169676 2.09057033619381 +-1.96274769397921 2.01891568581111 
+-1.10140958240724 1.49745748237244 +-1.07785311339892 2.11819790488578 +-1.04078514034397 1.70522333959586 +-1.87073249740995 1.97905487426094 +-2.43097104968972 1.68687585786454 +-1.18720848903528 2.50549054563514 +-1.88867572693623 1.30193663806192 +-1.59415262097961 2.49250365280083 +-1.5836744732084 1.07247715395412 +-1.22294899866453 1.23882977243981 +-1.54539692959285 2.29386863157622 +-2.74182445973626 1.86369442891798 +-1.6423097546073 1.43008258937395 +-1.37925228335432 1.89413210189439 +-1.15472200604624 2.15858545758299 +-1.2847896776954 1.02499409158251 +-2.42126876952595 2.52105463853574 +-0.9751082914742 2.25229712108619 +-1.19032705713532 1.53386406785346 +-1.93033770749814 1.22647642435975 +-1.90540455839134 0.791943837874598 +-1.67109167667574 0.87371571919418 +-1.68052767867691 2.36603271447591 +-1.06100592714272 1.11262497630439 +-1.71489828870169 1.71156009750909 +-1.85572072070561 1.63658915877811 +-1.32327554280407 0.919790230211831 +-1.10082816517449 1.844728183453 +-2.06273695685438 1.4160212888243 +-1.77218328647784 2.50937230278275 +-1.22168115711308 2.34071325090251 +-2.49185768560044 2.37715528950564 +-0.832486571264482 1.94793833790816 +-1.73564488346226 1.82720341438896 +-2.15115405943713 2.63074717063925 +-2.70675542157612 1.96342987519226 +-1.93294152103833 2.60010320878125 +-1.32239975778855 1.02561025366656 +-2.47603369595952 1.37094571020923 +-1.65367270166523 0.98726588486231 +-1.86809436416722 2.62607566917784 +-1.79867167862601 2.40379135961182 +-1.26151695613287 0.949752124943799 +-1.52226178253538 0.885163065032131 +-1.29042189706585 1.24878398281059 +-1.97873140683866 0.896468247738696 +-1.2454581498029 2.43449286137245 +-1.94168645980305 2.39605015747718 +-1.61130358929283 1.9467328060279 +-1.96506982295311 2.71633901026568 +-2.15036877289883 1.32657761963553 +-2.39173348851971 1.52632923236585 +-1.43632115096322 2.35598179929427 +-2.16141257340557 1.12991501762069 +-2.42997747146062 2.29474305499814 +-1.6660002074065 0.876136372317589 +-2.23721187234393 1.29796123596377 +-2.49347857337914 1.7538599199476 +-1.58025636708832 1.87700297868348 +-1.81325263202659 1.50814400844744 +-1.04395950910173 2.45123212550304 +-1.17411973558835 0.968226422076142 +-2.28180182118437 2.52386607600845 +-0.874913685475505 2.12529428720943 +-2.2411796324747 1.73051199920944 +-1.2203510808619 1.05292189771778 +-1.550661450238 1.97663723908833 +-1.95429748667873 1.68916661197788 +-2.06568915581799 1.46118270045615 +-2.39637975207067 2.42459228167779 +-2.43871990170798 2.23732770046688 +-0.878375766319252 1.40615294686056 +-2.57195920495785 2.07223828554622 +-1.54244736021853 2.41182057744361 +-1.65702164451129 0.962631132439739 +-0.888745458782679 2.24139705205805 +-1.74068233462519 2.11384395741395 +-1.42629558731339 1.02617629336662 +-1.12348519171125 1.33253718119583 +-2.5667067142064 1.28346826553813 +-1.35576747682414 2.44143100779972 +-1.88509088939524 2.09912709890402 +-1.68213126946709 2.53409848453946 +-1.19282591015569 1.29962228889114 +-1.64961551081858 1.20168431660629 +-1.98203679964415 1.56953360604591 +-2.02349474791906 2.24568452451847 +-1.28281109778679 0.968387682908243 +-1.81922828078738 0.846552584306366 +-0.904647311647451 2.17114275599367 +-1.22032588532663 1.54909537591509 +-1.77614193315155 2.16945963817186 +-1.51344495032451 1.81828201607338 +-2.21148723362482 1.34070772210605 +-1.58795024546332 2.33665958418048 +-1.94681635730437 2.21731402025095 +-2.36766309010333 1.45341393536425 +-1.48634334512523 2.58094888551629 +-1.97566684008113 
2.13748463847212 +-1.94273413902617 1.74578788223214 +-1.68888307640798 1.66256491465963 +-2.46973539934031 1.53580520859457 +-1.54260481380395 1.92207081102303 +-1.5884433383914 1.78478423038966 +-1.64584862485997 1.44115736892915 +-1.96020296447448 2.34638368942312 +-2.38824616324625 2.49239312017805 +-1.60939935453705 2.3641061102832 +-1.70834356917537 1.61359074384443 +-1.97704886503986 1.00993014824114 +-2.24074095998428 1.91669838082514 +-2.68829777602575 1.78357885636858 +-2.01516013125426 0.842326317899889 +-0.996921980273939 2.00159032688013 +-1.23593199810482 2.18105052130765 +-1.56216493620074 1.68486283764712 +-1.6555180419097 1.19097672494597 +-1.12476848157591 2.24914810386024 +-2.12849551065362 1.52239086850084 +-2.05372382466219 1.30109817859254 +-0.926437938106275 1.87867298208646 +-2.04322406881026 2.51507562723479 +-1.89671259837694 1.35575888630471 +-2.58520197261593 1.32194857251368 +-2.1772360540787 1.9899771539355 +-1.83164730480409 1.34980744773991 +-1.19079513567037 1.04338097803763 +-2.21049719302452 0.893680572495825 +-1.57391290784351 2.00781528370923 +-1.38187531659848 2.1629657973396 +-2.3134979177127 1.6787497955235 +-1.96683443010322 1.48799296327404 +-1.61121040301866 1.27596148381478 +-1.48793338470629 2.23003947128079 +-1.03943766791842 1.42544991149104 +-2.48912259003616 2.10949416084684 +-1.88428677151448 0.856332522337354 +-1.38527199484876 0.845571548198379 +-1.82746849580205 2.61364304459191 +-1.33381040486044 1.20010322470686 +-1.73542608459017 2.6743219569186 +-2.15467694866008 2.03599405380434 +-2.72959034702278 1.89172647659204 +-2.43479568742684 1.12465570446572 +-1.22103935478724 1.36626855189449 +-2.72749817882202 1.6698674652586 +-1.42222236188597 0.948770998497522 +-1.44721959017686 2.2230774578499 +-1.13873170407957 1.62081033269412 +-1.59274495112247 1.96432915375105 +-2.20078902038283 2.11663851466498 +-0.970262335133768 1.97064263651616 +-0.9332426381158 1.70753963942177 +-1.29641536156184 1.83185633270538 +-1.34014348032422 2.40001580304003 +-2.33708680140323 0.952731636804557 +-1.16860558789692 2.20270304706416 +-2.09326897001363 2.17451299585871 +-2.36878610121555 2.09306363014973 +-0.964920774553931 1.46732982479504 +-1.20891246989838 2.29106760768688 +-1.25573057866193 2.32651321309156 +-1.15211634383074 1.22683627715803 +-1.52375847099817 2.25904941743708 +-1.60767226804516 2.63287506158001 +-2.49683226410768 2.05602754418275 +-2.43763173882387 2.2143696644204 +-0.973480112371064 2.15100728206805 +-1.23381765418223 1.83426547840706 +-2.32054338882214 2.55432966955475 +-2.26580022413752 1.4933389476361 +-1.295315287062 2.01334954238034 +-1.33838377053387 1.06248610511891 +-2.61340321366212 1.60122071531138 +-2.37417624842844 2.40631611969791 +-1.97502958819739 1.60527123136914 +-1.25853221582717 1.65215772668368 +-2.40352650078482 1.07202467786617 +-2.28189623727 2.56766390519685 +-2.5937743959809 1.38900953390173 +-2.43498979880012 1.2793477279926 +-1.09851335825867 1.35072819933764 +-2.14728266465015 1.28297128806359 +-1.2159657021845 1.23316345641858 +-1.16631269267297 1.63573320767379 +-1.14648117915264 2.03519306656442 +-1.6608972586552 2.72120029058776 +-2.02148599269337 2.68695926525167 +-1.5878287828388 1.64436044542587 +-1.6453506685654 2.6257502538944 +-1.89584148347846 2.67251027932859 +-1.13403110763527 1.26258597614712 +-2.01828943843491 2.36482594859324 +-2.14743038777731 1.45226188421718 +-2.16464886626697 1.6159443473304 +-2.43238120571247 1.74880427632949 +-1.16625098556749 2.47955781592525 +-0.898548569981969 
2.15127105450726 +-1.5829447614821 1.35992842631883 +-1.49522365395448 1.84169690542392 +-1.64753459206319 2.0288848765055 +-2.13470110575206 2.31895271966702 +-1.73246129926763 2.52950905355162 +-1.81741893383212 1.68865527257792 +-1.14637175806335 1.73824336611203 +-2.17903680958322 1.14678211089692 +-1.55958845139018 1.17237396676562 +-2.55324027080557 1.63642079381741 +-1.88415976891763 0.956128455684549 +-2.10729584655262 2.61130588965959 +-2.20778051669247 1.58955932872838 +-1.55289511753715 0.8754605511064 +-2.55459140053487 1.58852587592325 +-2.12171355773232 2.52973078841812 +-2.19890325002841 1.91070450049362 +-2.05236362897328 0.915855044019437 +-2.66158073068134 1.53377867209262 +-2.52828999188266 2.18704690829388 +-1.76789628247268 1.95397552473939 +-1.41565587742723 1.23595209371767 +-2.37508477362341 1.12799211636654 +-1.37211924324281 1.52376025634355 +-2.01010402709714 1.94466175975747 +-2.72973595371938 1.95010428501762 +-1.81126760503746 1.03343821967489 +-0.876889089108474 1.59833725587002 +-1.38618819041646 1.6930095935101 +-1.89133786222435 2.01970129551835 +-1.47041791582949 0.974063874208397 +-2.62489891749344 2.13445186706728 +-1.99222291818357 1.47759279281369 +-2.12838105995677 1.60002292877532 +-1.17373223554812 1.37390889598525 +-2.25836203292481 2.06815690685532 +-1.76480964150748 1.15995656278453 +-1.25137677880473 1.7198676904524 +-1.33137312669776 2.57511027481875 +-1.68753138509116 1.32430725124202 +-1.06303493444345 1.15843385549731 +-1.58936710980258 1.60222216601021 +-2.24886317363477 2.40007066306195 +-1.13510591302536 1.57914191669322 +-0.966632714946783 2.08862640614159 +-1.3583668498183 0.942522611920751 +-1.38318956967912 1.67520880651197 +-2.23503563831098 2.00104246706239 +-1.30546674736313 2.06965846846349 +-0.823117478736705 1.55778064297981 +-0.947828007848031 1.67382966492794 +-1.61646441136024 2.20151969463102 +-1.57895243399314 0.883458075565315 +-1.8473195279809 2.51415084254465 +-1.15196261786393 1.26524360849059 +-0.886166471042699 1.49063643951438 +-2.02202494115106 1.7723169480311 +-1.79119848458565 0.911143648889876 +-1.77218037469781 1.84860848680607 +-1.85898442704699 1.28930105686656 +-1.8571364250826 1.29700299351013 +-2.75961250763855 1.84311220281295 +-2.16132850757337 1.88108619440815 +-1.99950618062637 1.61793703072242 +-2.06288429813928 1.24866653837732 +-1.5987874474237 0.794205672093696 +-2.6100317756506 1.69542759050152 +-1.98814465756065 1.87732784634925 +-2.49797551472209 1.65983369386292 +-2.49128469869963 2.22307439333296 +-1.3733090231987 1.30597993171311 +-0.980535542112804 1.68961965850092 +-2.18074122116438 2.25652279237754 +-2.57462168669797 1.55285479313231 +-1.7151351049403 1.45578743748835 +-1.84777012698821 1.65888964616231 +-2.14315878505356 1.0729221119406 +-2.15381608576051 1.10584785648159 +-2.01801729247443 2.00207731219477 +-1.93955814138523 2.13117604934148 +-2.40046726957432 2.02942085311285 +-1.27402997388042 0.979799776789761 +-1.74763198474891 2.62633870028428 +-1.81500957904852 2.34992802141703 +-2.25823322356216 1.48115027880765 +-2.2256991662877 2.45228824763096 +-1.73999517968393 1.04746959543324 +-1.93846098378769 1.43160047520421 +-1.86661691124088 1.12071482416711 +-2.72753450273073 1.6527629983472 +-1.7739465795318 0.893532491275138 +-2.61196589515082 1.2521869288618 +-1.57625274684749 2.64858923385388 +-1.86375400748572 2.461707092342 +-0.897811169256485 1.50669762630484 +-1.82196739687122 2.12207300307698 +-1.1489110528352 1.91658992784566 +-1.40790946221448 1.63037274078003 
+-1.92555504337422 0.917237263624585 +-0.993068057564116 1.97508272527254 +-2.55078528773508 2.16654476149476 +-2.44266041180587 2.03906978898219 +-1.37893307254187 1.34303674612663 +-2.50904104624412 2.13608302314303 +-1.7245052112127 1.79551828418396 +-2.01402250629387 1.93440663394055 +-2.14358150259129 2.2675853320258 +-2.16162498759574 0.879988967892743 +-0.99679384053222 2.19993460596076 +-1.52480906107 1.60813903294972 +-2.07685571721292 2.65248935994229 +-1.32530357175044 2.04490455890543 +-2.00221210856459 1.90367567118726 +-1.34834838517479 2.21445835007823 +-1.76704455165661 2.59424614718652 +-2.06733247589789 2.06271907406396 +-2.26744662782646 2.285275021554 +-2.06949844861872 0.91290685859046 +-1.7787585817138 1.4314311323548 +-1.17557098854802 2.01388362328059 +-0.85464245871178 2.02521829352251 +-2.6829827315966 1.80305204213134 +-1.97065953117333 2.50281520372725 +-2.63991871847308 2.23151452232621 +-2.71937560521535 1.46650322432167 +-1.24327150125525 1.40057478704027 +-1.18123288311533 1.72455755293838 +-2.58416833308063 1.68146152699835 +-1.15372766878924 1.33322620437018 +-0.956005895496375 1.77474620331547 +-1.68250487650282 2.07905653350524 +-2.31819429526574 1.38623002123556 +-1.67986598097256 0.791222341344721 +-1.5350642748969 2.42030169906563 +-2.16506221563093 1.07934751593045 +-1.10231184166393 1.30073819727174 +-2.53534058806024 1.99969457274771 +-1.36937288286633 1.15003472565211 +-1.37919676116205 1.4746691207262 +-2.09371080387852 2.09134759147189 +-2.4110332773859 1.74127588885374 +-2.08282454869247 1.94653056327722 +-1.82208545551173 2.60587040752641 +-1.44454122426457 1.87967693198941 +-2.37340140015355 2.14772385962776 +-1.55824216872922 0.993482902632124 +-2.21182690885387 2.14294106139339 +-0.948134665363497 1.68033075936607 +-1.99096132906622 1.13070351768754 +-1.73131682722366 2.07638013780586 +-1.20084091600499 1.25476951523221 +-2.74811311483852 1.92446255077145 +-2.35098840248308 2.51829918984792 +-2.02954021048269 1.21949052669576 +-2.25784325877673 1.82615553493149 +-1.41116030326582 1.02887875073127 +-2.08095729116819 2.59020497275985 +-2.14530110544062 2.43302587325222 +-1.53380191697276 2.60294677967674 +-2.10108517760879 1.54296208777002 +-1.63733266292608 1.8425549077662 +-0.871709488783992 1.94079470539934 +-1.91038941497452 2.64028330797798 +-0.98209102729804 1.85645557612649 +-1.22863686735279 0.975611605201877 +-2.33779652415312 1.45483424466572 +-1.55100696672223 1.77335756922609 +-0.98051368117801 2.01887687713376 +-2.32805923091716 1.25672622815243 +-1.26706491362772 0.938634209320909 +-2.4155343691679 1.15679964654616 +-2.55657076834238 1.23675114799759 +-2.73483189287133 1.57637029977984 +-1.23902735485233 2.18913140519059 +1.51641048536173 2.64584364088721 +1.41217875618956 2.54306554140992 +1.00431444820485 1.54337767975873 +2.23269212291113 2.51574033217973 +2.7334272912564 1.82074539874218 +2.17402625874153 1.44791204946584 +1.8767371778237 2.07784436502031 +2.22352894720159 1.76662823603026 +2.31010317288808 2.22932953618935 +2.29047133818737 1.64348908931485 +2.07341579113624 1.39590168836064 +1.82181606719739 2.55133109258956 +2.309794433401 1.31084963584787 +1.32693161348349 1.96057262177116 +1.99186424350835 1.78259615878112 +1.78114800898827 1.23770844096786 +1.64800335092939 2.41009643299974 +0.887563740354991 1.49013894748411 +1.9615378961759 0.958107030005223 +2.58355011975861 1.22468129963151 +1.31985145433343 1.77425322680271 +1.47522024482957 2.59489092788196 +1.03602749666668 1.23585799148775 +1.25890176369346 
1.36804030880801 +1.57269692047036 1.39664963957391 +2.5203771111416 1.44312355014033 +2.07839646235577 1.24473748960069 +1.07770240538291 1.62260306485466 +1.00033256689555 1.58867411453745 +1.72838064281738 1.25119179625533 +1.6677311560931 1.99653974593154 +1.34874974469191 1.20704458280004 +2.18922622034795 1.52196944736413 +1.29071008040107 1.55822258953474 +1.65689123886175 1.78453903550751 +1.29746335044972 1.01665741390086 +1.2262816293957 2.15325245259798 +2.38718787765599 1.77984646287284 +1.62639909515626 2.75224470019809 +1.85807727601848 2.6569582354138 +1.70463304825596 2.08067903805993 +2.0154902124638 2.73415037615693 +1.559628950418 2.33094036810569 +0.973813397330261 1.90141605118758 +2.51532992672644 1.12379826697058 +1.52641335640571 1.30526996635369 +1.03787648676432 2.0906137837889 +1.00253282811961 1.66562007599047 +2.60362998813859 2.31369561886883 +1.81448864888868 1.18709999321497 +1.0102913183952 2.30245785134382 +2.51898869963831 2.16778977726123 +0.929676037267811 2.05045694064817 +0.768451056429214 1.7617541798251 +2.07391820148102 2.65870069711006 +2.11600911988414 1.8519330732385 +2.04483318187765 1.70073235219813 +1.72149563113696 2.31371907609052 +1.49193585848904 1.28584832650147 +1.13711477535314 1.84839998323939 +2.11757379116959 2.41859052841089 +1.75695437610618 2.29678566845602 +2.51342967663429 1.29900527324996 +2.38336883251093 1.41937145216859 +1.3718901709514 1.19123414250559 +1.69113728216386 1.37605982738084 +2.18526027117393 2.56143873230091 +1.44283340683675 1.98927929856352 +1.38281153513587 1.9822503845068 +1.29568064397669 0.952171975267148 +1.95164470186926 1.86463363374121 +2.45069526972718 1.5989431571076 +2.57403231597043 2.22508579118646 +1.9824593569944 1.75462035321168 +0.819295898531592 1.9543851385685 +1.20677644012964 2.25923859140373 +2.37826722113884 1.87579222208357 +1.49361403843856 1.60298917208335 +2.06663867317609 0.825460809269017 +2.09512595644689 2.32261162529237 +1.59568007475234 2.19944994150198 +1.04274064999542 1.92389794810212 +0.990595072028256 1.59914257795087 +1.5740937730158 0.817436082740343 +2.36374245442919 1.51676241819284 +2.52180148520044 2.0953111518035 +1.47970168058298 1.92943733649797 +1.38773189995906 1.83847394118554 +0.987116428078181 2.28941456785894 +2.6169251077669 1.64953785574532 +1.33713982113174 1.26984580278865 +1.61379462386019 0.981401719138986 +1.20523225747517 1.81550065193794 +1.98657642117239 1.09585289748854 +2.29638502605728 0.937023122780985 +2.28399933686948 1.97103491563819 +2.08427156692839 2.66799131034411 +1.33750954408667 1.67167442849374 +1.87757217184178 1.54492943034045 +1.53827404415703 0.918086228878266 +1.43409752285577 1.408367400975 +1.36428931447215 1.02678127166352 +1.06764707852624 2.022936528069 +2.63605786392934 2.11230862325526 +2.16494105661756 2.48413002117074 +1.95709373381457 0.797053780858434 +1.83875469908587 1.44940616701267 +2.22582391601524 2.46915575394875 +1.48214448754213 1.53879281271718 +1.4912068652781 0.979131918386585 +1.82541061470754 1.39226628978274 +1.2216380894976 2.25808876472063 +2.11649472916967 2.0243155052746 +1.90153885771058 2.50217346007473 +2.36007095313168 2.15284366112671 +2.28906203618742 1.64459671590946 +1.62578520037702 0.821377809611178 +1.1110775689455 2.22032755844764 +1.8635605643364 2.40087107055358 +2.09183654653377 1.34988790423147 +0.872394503545976 1.9846812933544 +2.65813240780032 2.06577502841598 +0.9358634492243 2.11474489093295 +2.2999238138879 2.16972417848653 +1.6459568906436 1.54678720094732 +1.59042574882976 
0.847838851143635 +2.34586067542902 1.16915098483211 +1.96908318715639 1.63844431004024 +1.34265439117796 1.0369930839958 +1.95603716372049 2.45025325052909 +1.29663801843769 1.23274138615004 +1.61385912074245 2.34018767111472 +2.02008021016142 2.64130197464026 +1.7673765099468 0.788374002077646 +2.47515327156059 1.77601105951241 +1.3163588638278 2.51692934891678 +1.0382704720693 2.01944695132322 +1.62894656647465 1.04612109389625 +0.911594330784893 2.27131386379249 +1.41973670570648 2.15037018850914 +2.40839581236712 1.57516539887062 +2.25755018273837 0.915837913737661 +1.89764962995893 1.26995336030147 +1.82693454621828 2.11648050927094 +1.64005811930171 2.28047563925854 +2.44868335219569 1.45467624439395 +2.55841537330768 1.36189262334726 +2.37518231895289 1.833181783543 +1.47316089969597 1.86670759598262 +1.125898226616 1.81419126641474 +2.0114112575399 0.85737784531436 +1.81523632489613 2.23143097244672 +2.61371764492712 1.64575164763725 +2.33623889413199 2.45918004120237 +2.20544478016443 1.22356837567411 +1.06948112695015 1.77108633423693 +2.47640239259697 1.47772918686739 +2.16660320617727 2.3485662322352 +1.29295672474898 1.91197707735471 +2.26704049202151 1.74292334726549 +2.64814120902217 1.38909044534437 +1.17110919345639 2.2927105799178 +2.51823302124164 2.26377858918629 +1.23168327099181 2.50621259490497 +1.24498352639846 1.70472622940785 +2.28887639193333 1.13082855053715 +1.21332352001689 1.02522364536769 +1.39955121603332 2.40928487524859 +2.01770947080574 2.264336464447 +1.91481436461679 2.01647601973734 +2.18482033255047 2.63973390961534 +1.59586922776423 2.47019336697183 +1.16612518949903 1.66950980013705 +1.09561585586987 2.14985069770835 +1.34173300395257 2.10034691354728 +2.06791526217482 1.86828073718122 +1.64985608727208 2.26133396102584 +2.29832691884137 2.13705138441644 +1.44296374217144 1.54089159237689 +1.24665903624616 1.57208197515926 +1.52278113782874 2.16356504288039 +1.56639465734831 1.25759025568611 +1.2287265155325 2.23584984008691 +2.55609346366025 1.30525718115739 +2.09222360466144 2.40699938077948 +2.39318052258811 1.00023349792234 +1.77375966774768 0.890404005055345 +2.21294881774581 1.60462476218588 +2.52772266649178 1.5182161070267 +2.53191188442058 1.66331180230252 +1.44987391399688 2.25544928851075 +1.18945814434907 1.40767367462165 +1.15607887085996 1.61656317439399 +1.98611243067778 1.75293835577092 +1.59669702591843 1.02627433509103 +1.7576683577942 1.34360631507359 +2.33514190929479 2.11281040076635 +2.10722556848548 1.36357101284436 +1.68890596691988 1.57407823855526 +2.24949800407983 1.34834541704974 +2.7024350399012 1.84508921763397 +1.50028118862308 2.53286415197424 +2.01075712543449 2.41240316348619 +1.3789111836674 2.56796290025584 +1.30600076633043 2.10467135858259 +1.21141206937379 2.42292850835717 +1.79915459778628 1.56464383889458 +1.70257577531508 1.48335225739054 +1.37683739372066 1.47209906157575 +2.09389121270276 2.07983106745876 +2.0047259717109 2.68071214899889 +1.95050763290844 1.56452396696902 +1.87967144230849 2.2047030809196 +1.19932955176107 1.96956731284506 +1.84516127685553 2.46975297060332 +1.5967244552309 1.60852133295036 +2.33387593783907 2.14957267743073 +2.28166946813933 1.65890798856041 +1.04292838600001 2.37197729105598 +1.84678835932634 2.13063517537437 +2.21005451305307 1.14450751103929 +1.9252310977178 2.63893750587947 +1.69205163697249 2.04188125113494 +2.18663210606671 1.93054525459654 +1.624308816609 0.884750480749673 +2.09430095086343 1.58276071025527 +1.53156629622451 1.9668292146968 +1.91853189047895 
1.68483957367486 +2.25314438341654 1.8189023583988 +1.52871596439279 0.978780830730713 +1.60436091896615 1.89812695978678 +1.82023328634448 1.06960366199262 +1.93130507505036 2.15522752235181 +1.92998299466915 1.44256137496329 +1.21935128465763 1.12597128840632 +1.85933320828251 1.5683514908748 +1.73932801139078 1.24538321232892 +1.1412915452244 1.00390166694767 +1.16482779428831 1.17640898887537 +1.80056279570333 2.170115871341 +0.996971818138874 2.02544500165156 +2.75938619858123 1.88306023365355 +2.51702631822324 1.42355174849278 +2.50486371245704 1.84993953582368 +1.92939088305927 2.68454607714436 +2.54915146202318 1.67783908684275 +1.84539555617145 1.09944693323693 +1.71749526121191 2.75323501600421 +2.19276006564967 2.56171316233001 +1.73413793135903 2.6071028471785 +1.61726929339117 1.12070192907474 +0.848524407469696 1.50620242625943 +1.943348732763 1.01865326937757 +0.870914261308527 1.87093239462472 +1.6667621186795 1.5875244713249 +1.35564191172368 1.08439556647561 +1.13714662984482 2.46906479679517 +2.08986551546029 2.37372925131119 +1.501746212584 2.5591252748715 +1.58123986607886 1.24298047040126 +1.78256226189903 1.66090366056657 +2.22954570547215 1.86934800398073 +1.80952709818727 1.30334009182177 +2.02025471952281 2.23703351429201 +1.79124785399533 1.0656720474223 +1.83432894245259 2.62393038997895 +2.21686078001282 0.885220917859143 +1.59251486881173 2.45538467120848 +1.49579375608361 1.07257611675656 +1.16668896981052 2.29701369347519 +2.11157225350387 1.00794372147418 +1.71420946948699 2.5514072514931 +1.73601201268382 2.75820976727179 +1.69159622675276 1.37786655871666 +1.84722739922351 1.73533707017548 +2.24904398851565 2.03724976115397 +2.15961083489007 1.93801995831079 +1.38926501198209 2.01466393515936 +1.75823363474107 1.90067976013295 +1.49630085051528 2.36623484174258 +2.71322801894046 1.68996222455523 +1.29068691142372 2.55146548709369 +1.74788063437215 1.66401386026762 +2.38658152472696 2.11187741717792 +1.28137671201027 2.617762466413 +2.36000314726031 2.53585816173351 +2.70774264026902 1.4498823722685 +1.84769354945517 2.53439432800285 +0.99460406646601 1.16836244611538 +2.28260789204857 2.14542796787343 +1.88434698990083 0.871519506429351 +1.62472355178095 1.30950842889494 +2.12844012070916 2.66094780385963 +1.90255749689884 2.41097447740383 +1.35426526189319 1.51016016629062 +1.22348492730877 2.42131598711482 +1.08248840364165 1.53614442840687 +1.6455704993429 0.893818878554768 +1.73305160690568 1.40444859721235 +1.21111858029387 2.02769511750436 +1.38897616880483 1.63637056759096 +1.36017558442897 1.84724881824575 +2.04758716792337 1.6016027172888 +1.62480882693551 2.13818359420172 +1.83008168869189 2.36990706155545 +1.99048400715462 2.62348343290857 +1.28670670323542 1.95160057720265 +1.36998709382958 1.18337852133907 +1.81238367220855 1.09232200541071 +0.78239291299603 1.85840460097886 +2.39982230727098 1.43136600124187 +1.61385126131124 1.75605918745047 +2.28781171747021 1.19028300883717 +2.72693124784625 1.92842326125599 +1.83069272384516 2.70636570498816 +1.46696480595044 2.16748486550993 +1.09524980651683 1.12004035989291 +2.69706099246166 1.83265080115295 +2.23199339256159 1.57057800328828 +1.23907641692556 1.57492213229186 +1.16428767748646 2.54879036637492 +1.89033101311422 1.10817738481334 +1.16884872362486 1.27024481583855 +1.10791654604024 1.51439075263686 +0.972250297651655 1.55993066447324 +0.938856097389973 2.31696856020487 +1.58386463577396 2.15222483173481 +2.64217897037513 1.73417913958899 +1.08253342889718 1.06453511070929 
+1.91285732061139 1.35550989069513 +1.04193856100089 2.2389301108057 +1.54076951228551 2.12313802306078 +1.27482591707728 1.86355409397281 +2.26483319279275 1.54428217884622 +1.72570862014315 2.40476613071284 +1.36964530589528 2.03527250139511 +1.32389117077462 0.89796570687092 +1.34043794356755 1.57239418568558 +2.49963503934912 2.33108306628189 +2.05482187306977 1.97867077447943 +1.06781091958753 1.32887261126659 +2.35059018524832 1.9780753026577 +2.23494007017932 1.26897975102536 +1.21790829532317 2.56483748646922 +1.12490420228339 1.72844508568293 +1.3846305836784 2.51748414522505 +2.642218368114 1.98871404443405 +1.82125794839582 0.939068712745822 +1.16333584206647 1.95312172964684 +1.53477502612866 2.46866759470201 +2.22570445014632 2.23969371801711 +2.29198855020574 1.58915437616876 +1.04079472644723 1.12662669176704 +2.10851281647108 0.861104678348667 +1.58401776662565 0.803375404779828 +1.10438589564062 1.59167727035008 +0.849502897341764 1.85493716316766 +1.89914245948664 1.2222211565683 +1.76204570847101 1.75031623336024 +1.51462643638722 1.65514780905566 +1.86672697866804 0.971707073026365 +1.47660257386512 0.83918209419123 +1.32996026983953 2.38244943272791 +1.54007314796097 1.19401294038168 +1.33357886875517 2.21675169233701 +2.0991583759757 1.70750540167562 +0.910528338034666 1.3029131586524 +1.59809301186822 2.39620957801587 +2.35765673172197 2.02319647323809 +1.784427410909 0.849854812012083 +2.10189317724205 2.55130625701047 +1.48987620462201 2.09911303034521 +1.62698860837615 1.32377602810509 +2.57823542783505 1.44421183828733 +1.51395953631497 1.70172056507788 +1.40622015435717 2.38727226991675 +1.71533848949245 1.81663383542097 +2.54783100773118 2.127568401848 +2.12517974151767 2.2443723431649 +2.01009183748162 1.36973439900673 +1.50973413743547 2.6273661148595 +1.93981037939436 2.02706467824525 +2.20500571232758 2.5463676624792 +2.0636997390399 2.46982158579401 +2.69032013740859 1.84138381805739 +1.67430063894487 2.3292290596152 +2.2997770234803 1.1556274941983 +1.46370854087642 1.95784484174571 +1.28826544246174 1.34372771110854 +0.947558517088211 1.23578302793673 +1.23817269223279 1.80530029021672 +1.8875729325088 2.20300162395931 +1.32480344919956 2.02020953547678 +0.855390183613664 1.37196771147198 +1.2069904026436 2.44441805126614 +1.23986374399162 1.69785734078384 +2.20428063719071 1.56977823328695 +1.93589087900243 1.01211693229622 +2.26465181166775 1.51135945132471 +0.911883014240331 1.56307527887172 +1.2881366149383 1.68723021027169 +2.01858956132553 0.84377890637613 +1.9164324351447 1.72530772069938 +1.81378168145663 2.13291188002101 +2.02303225499115 1.25853974696717 +1.54149978373668 1.21660586305431 +1.54256826870612 1.23347193187571 +1.80257966461129 2.51351229519121 +2.38860951781741 1.4952967529977 +1.35360258769712 1.54574007632679 +1.68716530267618 2.01736038875303 +1.71660503883383 2.15101545582063 +1.39991684535987 0.901437263921863 +2.24125854110814 1.70511340721048 +2.08404721644244 1.96832331641114 +1.88526248837358 2.54804161934651 +2.23149066277317 1.18769970348573 +1.36886278724766 1.84008942775897 +2.54395106107465 1.8776358151669 +1.80992121620572 1.78206727698914 +2.6425031451369 2.20511074455924 +1.74967929253823 2.31733521352059 +1.02976232346616 1.36984809812627 +2.33313595967836 1.1790973837348 +2.1084008402983 2.68246534605048 +1.87331539903305 0.792950518415338 +1.5227176607447 1.85822131343654 +1.19830605991653 2.27919510797135 +2.20688755482904 2.20856631312988 +2.14079293933234 1.55582402691714 +2.27127304485536 1.80230864601678 
+2.06936105246193 2.3970505581098 +1.98203694073952 2.04818174288145 +1.36873182699553 1.44699873913548 +2.52300117210022 1.22678445511035 +1.10957241150088 1.63087698117367 +1.91673614968083 1.50182323296092 +1.44052430892832 2.07027916655413 +2.3283151616203 1.1594869997213 +2.6890695080953 2.12345475981481 +1.9347213148 2.44733089579738 +1.0584609797085 2.20868913269139 +2.37273408750541 1.55255995829127 +2.38086586444176 2.24376739420465 +1.71014493003956 2.26307872541717 +2.40015456451572 1.43054931050635 +2.06057975579522 1.39731692428238 +2.12628218809611 2.49541312408171 +2.08765138032383 1.76941419065467 +1.82800365750215 1.43520193312786 +2.14120462069756 2.32792554188988 +2.66514978975526 1.92038437070794 +1.92006605908743 1.19449965468145 +0.910071529434091 1.57937968800313 +1.98353164548523 1.2523519438552 +2.22420478471092 2.49656400967858 +2.59352758292577 2.12305959824941 +2.14538577989212 1.46197399076543 +1.82465183035008 2.55469256125859 +2.34173690526283 1.44615440721161 +2.45552814331374 2.03443539513744 +2.27182702718771 2.55309702827132 +2.52222857343502 2.40139277883343 +1.01722781591586 2.35628066862471 +2.33731496798343 1.45179877521939 +1.36834412159867 1.21020074094183 +1.77001956336669 1.89528317049614 +1.46209600147328 1.7995252651023 +2.49895503649957 2.20439505575693 +2.72695962058849 1.77022426387764 +2.43606892552695 1.07655027367643 +1.81446787946395 0.936923766494757 +1.20434049375824 1.95176689511634 +2.73946240542925 1.96817760959736 +2.34537386380605 1.16424313419036 +1.8809288651543 2.08188688101716 +1.01384328397459 1.38040360481015 +2.50658143682874 1.69605045065752 +2.14012435077227 1.87925115691966 +1.36395836946092 2.00527196795217 +1.89573105050719 1.64207291182599 +1.20397278618536 1.72255919266961 +0.946931813829607 2.03130432496316 +0.869060640209383 1.86971780982337 +1.42185074881188 1.58935029197953 +2.46008454494647 1.47069367904685 +2.63733681569344 1.46384389091752 +1.96147482561416 1.93314248601443 +1.75158662906385 0.844358419985539 +1.66407983888409 1.75484927649147 +1.09420993363953 2.25118435731626 +1.96300515344835 2.58435381295628 +1.12971979704223 1.13447543604768 +2.50548798915467 2.15807106111667 +1.37905677965379 1.70334213551602 +1.88741313664711 2.11332152456514 +1.75200690560511 1.72778356260157 +1.85343009232081 2.74943090275399 +1.66701601304582 1.58266448786951 +2.06060157156087 2.5983448582562 +1.03375785843006 1.67966508444867 +2.09983234432063 2.50233413557059 +2.23171246422923 1.73534588607497 +1.45669064762539 2.57641741538889 +1.28274398854471 0.973347841282404 +2.32710757198281 1.84050383510537 +1.4650554512488 1.71907558523587 +1.88101085180885 1.27064705498985 +1.66971365035049 1.09022759877614 +1.06084336245901 2.32681811087302 +2.39005445594437 1.81367302172355 +2.66062950853727 1.73792626055426 +2.23822811849884 2.20084294023461 +2.07663878145165 0.971479487125791 +2.21255751371852 1.03976312976799 +1.67159637248612 2.41578733686826 +2.64341469480193 2.05478821367494 +1.76918037280909 0.87979914060331 +2.29533981437332 1.45369740620724 +2.28092432113833 2.52231983233712 +1.08057054456792 1.69884926638103 +1.72356088597796 1.20098560022659 +2.40045642339162 1.37353531036995 +1.63278923834211 0.979959168923116 +1.24711453447587 1.47270102192185 +1.41364345465324 1.17142148035116 +1.40679623930252 2.65462634012572 +2.26206964858345 2.52626020646191 +2.2152064395929 1.51535455834589 +2.11366485244132 1.12888948105744 +1.54021871740467 2.61893612238116 +2.57729159852958 1.73163648507095 +2.32873259576506 
2.26577758787668 +1.3777135526622 1.47293439785487 +1.86927449794955 1.48492860792673 +2.59466608286372 1.38248930637262 +2.26734175298832 2.47891021052844 +1.31337023990697 1.82223458540163 +1.24240405343941 1.18430521943144 +2.62445936816282 1.32152607523373 +1.71301395351536 2.22343837002761 +1.2917313133227 1.4627871685522 +1.29335379831961 1.5417387084896 +1.25506612657107 1.80733384050897 +1.40077403931416 1.19560234480075 +1.46618668687067 1.38113155400849 +1.36366749553478 1.85430074854291 +1.50023904813922 1.62213557538114 +1.62190971568695 1.62130564962051 +1.39443309324331 1.05179616062439 +2.27074950745798 1.1312690749625 +2.553587385905 1.43552254138983 +1.53183547126598 2.67600197343624 +2.18884599999062 2.32355412047825 +0.936309844707168 1.22305999136067 +1.23443041926718 1.37082888650245 +2.28148243900857 2.28341878762937 +1.27314836018256 2.20110956810883 +1.62544762528039 2.5541488518799 +0.830290136379219 1.97992169157139 +1.29127333451531 1.61146916214845 +2.66352933411024 1.97317747910044 +1.7241282346519 1.39452690629757 +2.08526308181233 2.20828546349428 +1.98165581746495 2.570699851946 +2.67558635397351 1.61222709481142 +1.95577637570447 1.08040807441345 +2.13949492246381 2.24691811807922 +0.973674076129711 2.04697367547548 +1.02287198881156 1.40018894847951 +1.37979170911483 1.19538719112522 +1.8170695463056 2.64371613039172 +2.37892677586994 2.1635224469798 +1.33002465542874 1.84869962370492 +2.22202401187739 2.31059439339212 +1.70944875430784 0.988130749189324 +2.27264988979793 2.42900166193492 +1.973820864209 1.8356534670899 +0.943292836644716 1.85940530335999 +2.4269647444459 1.4003754695738 +1.34438599877528 2.25244545097 +1.06480238864667 1.61826601390264 +1.22243536495141 1.0553636285337 +1.70787697237454 1.53941945124886 +0.824707948588795 1.81343517544217 +1.22517536905131 1.82577829108111 +1.24580874478913 2.02934443297333 +1.41574704133443 2.27074776681609 +2.14286190828777 1.7028892212408 +2.45225880619607 1.62901906108952 +1.832916664877 2.45949902337513 +1.38314388365022 2.49857383406258 +2.55277228586844 1.68072548460684 +1.97510911522902 1.85856421849228 +1.16026099397338 1.85093540790028 +0.908052793690032 1.54417461621112 +1.46996272866151 1.89972653750799 +1.42269578481562 1.49545256885237 +1.09543755881957 2.34072581863499 +1.76423035234681 1.5520092984522 +2.05366313083566 1.66761104248933 +1.66058160037271 1.45764687329999 +2.24909078840635 1.50799303081355 +2.55300182056151 1.44245072592518 +1.4102949951636 2.53748470450289 +2.0538995726826 1.42841069818012 +1.62037280386782 1.68639495036102 +1.97797731588132 2.16648118336222 +1.33874720519877 0.98694965800739 +1.18056060232691 2.43859493311963 +1.42654435800857 2.10894671453632 +1.61166532848499 2.26496549228675 +1.75589820042721 2.53035126764796 +1.54476565912834 2.24690273542426 +1.41120494248814 1.66925550716466 +1.96596214732624 1.23363653056792 +1.2176972860331 1.68740598995707 +1.93604036931417 1.70245913507885 +1.9140571984323 1.20798783142588 +2.01488871451936 1.7600762438821 +1.33106074154846 1.32337049998812 +1.45916425668172 1.9378661261732 +1.44719232617415 2.13419445695824 +1.8232000963623 0.795216975254035 +2.14441486074126 2.33182833528615 +1.32411571311302 1.51999640416822 +2.11311532229654 0.882472427082277 +1.12051013296939 2.25826785339511 +1.92561532835013 1.43555162985346 +1.55879366582728 1.50455867916814 +1.9116483921977 2.30319303365893 +1.63532639852262 1.01616767652801 +1.27929723587356 2.36850546112753 +1.86687381936706 1.30491372289605 +1.33543519907168 
2.02083788810766 +1.88690025357998 0.819735746351099 +2.26376114178917 1.11274259862027 +2.61101390839092 1.82948426947466 +2.38788602266929 2.28482720626987 +2.08942085037476 2.49272399813405 +1.17978070442102 2.39975299638233 +2.32327933701224 1.31161044396929 +1.35409592137209 2.14441461394078 +2.14230641581587 1.87033197980843 +2.38100727087355 2.46242265215612 +1.06612644063777 1.66757002378351 +1.58065088510982 1.53239391835875 +1.38599117611072 2.11224601462952 +1.51564493075482 1.58990577700085 +1.27448941672689 1.78379244728482 +2.02597800920255 2.33758155255116 +1.92984876128382 1.04631565230436 +2.16896616014189 1.41726524707398 +1.1232520229975 2.28656195288993 +1.53536975428931 2.65607313205025 +2.48693667304239 1.57355081009216 +0.969110154067195 1.7544694258692 +2.31991998756773 1.60784363931513 +1.96599201810083 2.47112198032907 +1.37500282422177 1.36929603850938 +1.97442734100393 1.84608277118302 +2.012116403305 2.68556955501906 +1.89432944914795 1.49548744036353 +1.11479338536508 2.45182400893889 +1.77078662112824 1.88201846553481 +2.48274940157778 1.39988923397384 +2.02191496452785 2.27551297097004 +1.4813479063893 1.45304116651884 +2.43122569051108 1.2681610579463 +1.25621928412936 1.65362658489964 +2.20663149606905 2.50267435422636 +1.1090416363301 2.38365712201691 +2.11170576977081 1.62636906885079 +2.36058917314283 1.12612367792524 +1.42133744199297 1.49877353243998 +1.52054408982865 2.46225039007611 +1.95592078797988 1.59675618725366 +1.52376493717081 1.06432069464124 +1.91809857449032 2.429699277957 +1.92890262090138 2.42482337074823 +1.45568841042466 1.75842855310536 +1.78746693393684 1.01074020516596 +2.31978582030631 1.5854369201558 +2.38136696162171 1.55359279176689 +2.10627992216966 2.4718454200635 +2.22098343305758 2.24705617081842 +1.61809216950557 1.78240305090792 +2.11040533565453 1.5285033699917 +1.05427005140579 1.8601737990836 +2.50734154785521 2.2729235081794 +1.55428148199341 1.02910263462461 +1.55520365851468 1.68277992027349 +1.7828801721195 2.08259969715498 +1.3442463832768 2.63423279718033 +2.16190499159045 0.984791719806409 +2.1759890937107 2.23978024952582 +1.18830668086655 1.12674818698785 +2.32641471734739 2.05085808549545 +1.10201556420422 1.5327623528289 +1.95124487530909 1.40105570851362 +2.12215857904873 2.28065889792985 +2.58059024390302 2.13967080552599 +2.07965548428244 2.66389738860077 +1.85356733993164 1.66887301670856 +1.61599925159014 1.36766228264666 +1.76464588519654 1.10423853945456 +1.16818135742567 1.5041044526594 +1.44936316691808 2.16858701639346 +1.73905031754172 1.16347965689845 +1.77984635157979 2.35724340284712 +2.13782947974748 1.43804365395106 +0.846182306283182 1.76071875028781 +1.75974759784064 1.79943474986128 +1.24032733588017 2.51173965817786 +2.59115123095459 1.35317879634447 +1.37115818307272 2.49399163153491 +2.13269139405809 1.27905609922952 +0.942337025408662 1.30542006761304 +0.987829342004872 1.56421675485096 +2.73750922501556 1.79049813792578 +2.74844422981612 1.9259990053112 +1.20429695070258 2.37726601677484 +2.286453394671 2.11892888693608 +2.16265143173284 1.91750701468906 +1.30495064145423 1.10966514980889 +0.996028702692157 2.29770108497239 +1.64997395964808 1.80846900677777 +1.95647368811539 1.07609837593979 +1.28760006323925 1.8343451460684 +1.98413811296693 1.47403851941696 +1.11802726423836 1.23275371965489 +2.00608095339037 2.23008204343266 +0.818535812650836 1.62359830316625 +1.96692966808102 2.67304410830609 +1.31824363859839 1.60381639048926 +2.56712102748922 2.19997654434762 +2.61848339995838 
1.92511563327632 +0.848929583546734 1.94654534057251 +1.30946602438114 1.01260709481782 +1.46764758553824 1.77871324260256 +2.11727605525873 1.87062453197784 +2.31684118461332 1.96725487195424 +1.28926258113704 2.20354176311291 +1.13380784260578 1.24447861452124 +2.2709183208505 1.07373693863398 +1.62333012231163 1.61330082871488 +1.90247609418354 0.841684720465011 +2.27427153194792 2.16712289581544 +1.79568515902854 2.52569280470259 +2.36436064074285 1.80279399360067 +1.33549034687228 1.56553267965562 +1.5877063143859 0.954713235595292 +2.67703772847078 1.96307832001246 +1.98029490326068 2.69074537557087 +2.29018678375057 1.9203985878127 +1.9528137305612 2.18963844188026 +1.62900465073533 0.851821898003138 +1.8261018148484 2.04248058887667 +1.94600980820603 1.48577169001571 +2.10552425960547 1.02406883564315 +1.6063996548625 1.51149635015524 +2.12657319892666 2.24943373397461 +1.8859046073491 2.48103042923383 +1.60601061068944 0.79293017507068 +2.66158239821441 1.56835322732574 +2.22582618471704 1.71138755277268 +2.10327124268285 2.60083939785606 +1.49028712835631 1.1530838636542 +1.58894970546014 1.71314759877048 +1.2716345870728 2.22632729167587 +1.76619265925608 1.24663973225749 +1.87959028685934 1.881389125702 +2.22903537096924 1.62923624132297 +1.68177410306878 1.6212565079192 +1.11103381308264 1.65887867052577 +2.53219037734441 1.3935678824643 +2.22521159843079 1.77212875195316 +2.69916082451589 1.77553042162678 +0.845868532531924 1.60244799590207 +1.68030830331615 0.880827802219457 +1.8319131955365 1.04045582142196 +1.98743152104787 1.70002036167777 +1.78982298680118 2.05624773817605 +1.4420951469662 1.65809505478016 +2.19454725970051 2.53734100748322 +1.17885528689719 1.72429774979598 +1.29493359707764 1.87854176059834 +2.19322727013848 2.18979466006628 +1.22710595511369 1.28079498278446 +1.11343798365912 2.50373064259536 +1.58541579962245 1.31326491849965 +2.57271207215733 1.52909841480202 +1.90714440307117 2.69767834384463 +2.04119112527466 1.45118695822082 +1.55897536034233 1.20226766586772 +2.20615121720827 2.64919675698972 +2.14657714263073 1.95717191741339 +1.9979023840139 2.47617957879326 +0.995723421205795 1.16315554406962 +1.55759704670406 2.36637035709343 +2.02509968074507 2.49359882109336 +1.61031217685438 0.852078430340654 +1.23693228837571 1.6809427910345 +1.73573854331396 1.83608593930028 +1.84073130831591 2.36632144170351 +2.10070680452979 0.995882344418234 +1.01673077883667 1.54810602402783 +0.995753041920281 2.22843029767743 +2.04452911669857 1.0305178891728 +1.65817621674857 1.05891666979066 +2.5083689819882 1.71544630637861 +2.17241708746648 2.06838159075475 +1.62838149488441 0.798972534467495 +2.19360103875867 0.872486376189656 +1.95204540666661 1.96934931537605 +2.0561848073127 2.60279478922955 +1.93493435111365 1.67399531601465 +1.95030282203577 2.58549394783101 +1.96968566624916 0.928670536712772 +1.18283402918375 2.27923785055525 +2.588385721189 1.86390569804705 +2.03616986189506 1.03764298836238 +1.92377462609208 0.877893149838812 +2.02324804662264 2.0656612408208 +1.15117022021211 1.55987348638943 +0.954182665315485 1.91690948185048 +2.4671769640269 1.9754807464639 +1.1690973793017 1.75093490349597 +2.39433768716178 2.04953373268715 +1.07130642932377 1.90837815868205 +2.54104598423935 1.76584475486077 +2.02963809481359 1.85489695890344 +2.15225784944839 2.2074596569734 +1.29596258512235 1.93286260635129 +1.87184814921117 1.68530835209883 +1.93546250693969 2.71340740563816 +1.73714067319877 2.40995939959309 +1.44692363122947 1.05451892687476 
+1.61625410289041 2.39355465442411 +0.893546550055063 1.4390686624544 +1.71688511966265 1.23663688309959 +1.47665145479611 1.51911065027586 +1.40121454045645 2.0674275574135 +1.1595156001719 1.41867057372353 +2.4297197591374 2.2864455026435 +1.95689723639286 1.67259969840295 +1.2688892241428 1.72363325067333 +1.45872123165525 1.07048050499058 +2.36701041551775 1.41306128956846 +1.41117220649964 1.71092125446073 +1.28650950452781 1.88902554054282 +2.60328676550186 1.79733399719468 +1.0194826898957 2.1060670278827 +1.6102704913216 2.45365540123082 +2.69970157834238 2.02939208036757 +2.13423739046327 2.54121674549303 +1.0870130457776 2.20111103540755 +1.74891777754298 1.6137195168855 +2.10288493380419 1.09288244646511 +1.12365081392697 2.24643083287872 +1.82027369037739 1.80849900002129 +0.812047380474425 1.76744591583035 +1.97605230983815 1.80138213558591 +2.35176527778155 1.1965163312713 +1.34877642754934 2.60689561114184 +1.68330259340352 1.06567607539243 +2.14893332960553 2.68073109116874 +1.44654184252492 2.5382487098547 +1.04957813768185 1.24510675294793 +2.04183168381921 1.01895991851113 +1.6726394705335 1.03568705356217 +1.84575530605859 2.0307477037283 +1.78360332267827 0.911467981045163 +2.6942047742056 1.98970960731155 +1.07882269517056 1.83100545172117 +1.10382877521685 1.83805653125516 +2.197287935284 2.29464287309445 +1.01335024226925 1.60206285583324 +2.20504219782076 1.59781177759639 +0.85373031020633 1.80173005069143 +1.43070989333562 0.868500549797929 +2.27247411337218 2.1190813677246 +2.69477952037475 1.98821591537914 +2.53789769065104 1.9768398487801 +1.8941623610305 1.54174313879109 +2.05472045204139 2.21687176496259 +1.90596408395565 2.33692428235433 +2.11857953340284 1.95522838224194 +2.31781962791926 2.59611699263102 +1.96418013049758 2.37679017706311 +1.70504877761475 0.983754108645982 +1.40075232971928 1.53034930209166 +1.50987807054541 2.35437637659258 +1.94730879365004 0.956056825337029 +2.59953519933394 1.64105962401725 +2.20600923056251 1.80753096424512 +2.15166682515762 1.26168947805188 +2.05179303370885 2.53829013741112 +2.48290999884254 2.42505120903722 +1.75345120717309 1.27436785724483 +0.862072138567484 1.43139209038115 +1.94856477899945 2.48819276502824 +2.10980694972447 2.40273243746257 +1.75878333345524 1.06227848584301 +1.57259475438393 1.13473222403324 +1.79679801267109 0.813074383405066 +1.27930476468525 2.07789244594521 +2.14379168719522 1.21492303301117 +1.71574011536784 1.92341273835397 +2.2407992360974 1.65003143575511 +2.63286345357544 2.26285803968555 +0.963856132687098 1.82383368116341 +1.91842859609521 0.948995231171167 +2.13672088830269 2.25992616311184 +2.25316957848615 1.00548779497392 +1.99666569566823 2.15223494636363 +1.59620739052541 1.55770651342816 +1.42205060411713 1.73395175625107 +1.7809507688368 0.923350802143133 +1.58581164338163 1.27766224135182 +2.00081179382748 1.83898921123243 +1.17015621623493 2.44627645610369 +1.49247245442591 1.49238726122773 +1.71821013603828 1.28483272482178 +2.40550469419456 1.13289890297226 +2.30485379089139 0.976926706717378 +1.45685960583857 2.15238023734189 +2.66676675852857 1.417357465704 +2.58171850022397 1.4882922340045 +1.63564487871251 2.68535850428513 +2.3262798953662 2.2995590716461 +0.915678945356077 1.83730438117406 +1.1113798525045 1.03378073823175 +2.15677673181987 1.67517686913258 +2.27677701412237 1.11813125012911 +1.76863910908348 1.14997074425689 +2.44791574914477 1.99394920746333 +2.19249521241061 1.32857752589977 +2.39730546854905 2.05045901749752 +1.45415400898553 2.66703477267644 
+1.04514181053734 2.37587517824493 +2.6511122169765 1.48496562194548 +1.9708976494009 2.15644925551958 +2.3221754901903 1.86441070398784 +0.977867301539547 2.13399638699836 +1.73119007422126 0.979559548185236 +1.03295819692782 1.34611583127177 +1.64836286984853 0.932713570059306 +1.35591379458553 2.5194491227018 +1.61864069102801 1.70306805612988 +2.5355301359529 2.29669008635453 +1.1875470843958 1.01907152262054 +2.07577183841265 2.04954044612593 +1.50718509078494 2.01363050098068 +1.03916075824297 1.27601484580434 +2.52461983168966 1.31756944533906 +1.72674821082971 1.66165958483241 +0.973977545262343 1.69940046173058 +2.2731992998021 1.19871118663347 +1.90805292081556 1.56664613120726 +1.16164120378441 0.976315680877334 +1.60323032036892 1.8716576848971 +1.92896213707051 1.06678975791208 +1.37272274259946 1.17729465755171 +1.69500893958381 1.34053261716387 +1.48528027579657 1.09727497909687 +2.68170243453703 1.9586577894958 +1.24687312122903 2.07632410245485 +1.10435597504026 2.23920713177419 +1.03587976934857 1.44892789794601 +2.73478236979342 1.71850396598226 +2.12589664383329 1.40451862056277 +1.66984084386847 2.06916442372373 +1.90038858632094 2.15643746171615 +1.52174095668367 2.68309449356518 +2.02016988860912 1.01656673800669 +2.31586586030669 2.07344346585221 +2.47738069154791 2.06655868094883 +1.03824528538159 2.01876151979438 +2.15787635369695 1.72822184579915 +2.40903998118362 2.39547690468378 +0.974212498390145 2.2671528733196 +2.00397084186322 1.13507457029543 +1.2970252144011 2.11010706330187 +2.54493198030166 2.28538889017425 +1.50717117636002 2.62788947111464 +2.02355599262289 1.28329159085176 +1.30585334588311 1.33432565814353 +1.45712289585269 1.0749380397471 +0.985628696144587 1.66987102523915 +1.12883733770347 2.52191695057324 +2.43141569102652 2.21545025239236 +1.23088925004474 2.31003050411589 +2.37462820437274 1.94815362941942 +2.41612967940516 1.18171521181709 +1.71578799419574 1.46201287394858 +1.39320079049564 1.15970499393067 +1.77723694405056 0.818807184216595 +1.50684847262225 2.11892986715308 +1.637133 1.271925 diff --git a/inst/dev/datasets/cec/mouse_1_classic/aproximation.txt b/inst/dev/datasets/cec/mouse_1_classic/aproximation.txt new file mode 100644 index 00000000..7813681f --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/aproximation.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_classic/cluster.txt b/inst/dev/datasets/cec/mouse_1_classic/cluster.txt new file mode 100644 index 00000000..17bc6723 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/cluster.txt @@ -0,0 +1,3000 @@ +1 +1 +2 +1 +1 +3 +1 +1 +1 +2 +3 +3 +1 +2 +1 +1 +2 +1 +3 +2 +1 +1 +3 +1 +1 +3 +1 +2 +1 +1 +1 +3 +2 +3 +2 +1 +1 +1 +1 +1 +1 +3 +3 +1 +3 +3 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +3 +1 +2 +1 +1 +1 +1 +1 +2 +3 +1 +2 +1 +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +2 +1 +2 +2 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +3 +2 +3 +1 +1 +1 +1 +1 +3 +1 +2 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +2 +3 +1 +1 +2 +3 +3 +1 +3 +2 +1 +3 +3 +3 +1 +2 +2 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +3 +1 +1 +1 +2 +1 +1 +3 +1 +2 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +2 +1 +3 +1 +1 +2 +2 +1 +2 +1 +3 +2 +2 +1 +1 +1 +2 +1 +1 +2 +2 +1 +3 +2 +1 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +2 +2 +1 +1 +1 +1 +3 +1 +2 +3 +2 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +3 
+1 +3 +1 +1 +1 +1 +3 +1 +1 +1 +1 +3 +2 +1 +1 +1 +1 +3 +1 +3 +3 +1 +1 +1 +1 +1 +2 +2 +1 +3 +1 +1 +2 +1 +1 +1 +1 +1 +1 +2 +2 +1 +3 +1 +1 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +2 +1 +3 +1 +1 +3 +1 +1 +2 +2 +2 +1 +1 +1 +2 +1 +1 +1 +1 +2 +2 +3 +1 +2 +3 +1 +3 +1 +2 +3 +2 +1 +1 +3 +1 +1 +1 +1 +1 +2 +3 +3 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +3 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +2 +1 +1 +3 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +2 +1 +2 +2 +1 +3 +1 +1 +1 +1 +2 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +3 +1 +3 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +1 +2 +2 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +2 +1 +1 +2 +1 +1 +3 +1 +1 +3 +1 +3 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +1 +3 +3 +2 +1 +2 +2 +1 +3 +1 +3 +1 +1 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +3 +1 +3 +1 +1 +1 +2 +3 +2 +3 +1 +1 +1 +3 +1 +1 +2 +2 +1 +1 +1 +1 +1 +1 +3 +2 +1 +3 +1 +1 +1 +1 +3 +1 +1 +3 +3 +1 +3 +2 +3 +1 +1 +1 +1 +1 +1 +2 +2 +1 +1 +3 +1 +1 +2 +2 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +3 +3 +1 +1 +1 +3 +2 +1 +1 +1 +1 +1 +1 +2 +3 +1 +1 +1 +3 +1 +3 +2 +2 +1 +1 +2 +2 +2 +1 +3 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +2 +1 +1 +1 +1 +1 +3 +2 +1 +3 +2 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +2 +1 +1 +1 +3 +3 +1 +1 +1 +1 +3 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +3 +1 +1 +1 +1 +1 +2 +2 +2 +1 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +2 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +3 +2 +2 +3 +3 +2 +1 +2 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +3 +2 +3 +2 +1 +2 +1 +1 +2 +1 +1 +2 +2 +2 +1 +1 +1 +1 +3 +1 +3 +1 +1 +2 +1 +1 +3 +3 +1 +1 +1 +3 +2 +2 +1 +1 +3 +1 +1 +3 +1 +1 +1 +1 +1 +3 +1 +3 +2 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +3 +1 +3 +1 +1 +1 +2 +1 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +3 +3 +1 +1 +1 +3 +3 +1 +2 +1 +1 +1 +3 +3 +1 +1 +1 +1 +3 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +2 +1 +1 +1 +3 +3 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +2 +1 +3 +3 +1 +1 +2 +1 +1 +2 +1 +1 +1 +3 +1 +1 +1 +3 +3 +2 +3 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +2 +3 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +1 +3 +2 +1 +1 +2 +1 +1 +3 +2 +1 +1 +3 +3 +1 +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +1 +2 +1 +1 +3 +3 +3 +2 +1 +1 +1 +1 +2 +2 +2 +2 +1 +1 +3 +1 +1 +2 +1 +1 +1 +1 +3 +2 +1 +2 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +3 +1 +1 +1 +2 +1 +1 +1 +3 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +3 +1 +3 +2 +2 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +3 +1 +1 +3 +2 +1 +2 +1 +1 +1 +3 +3 +1 +1 +2 +1 +1 +1 +1 +2 +1 +2 +3 +1 +1 +1 +1 +1 +2 +1 +1 +3 +2 +3 +1 +2 +1 +1 +3 +1 +2 +2 +3 +1 +3 +2 +1 +1 +3 +1 +1 +2 +2 +1 +2 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +2 +1 +2 +1 +3 +1 +1 +3 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +3 +3 +3 +2 +1 +1 +1 +3 +1 +1 +1 +3 +2 +2 +3 +1 +1 +2 +2 +3 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +2 +3 +1 +1 +3 +1 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +3 +1 +2 +1 +1 +3 +3 +1 +2 +2 +1 +1 +2 +2 +3 +1 +3 +2 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +2 +3 +1 +2 +2 +1 +1 +1 +1 +3 +1 +3 +2 +1 +3 +1 +3 +2 +1 +2 +3 +1 +1 +3 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +3 +2 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +3 +2 +1 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +3 +2 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +3 +3 +1 +1 +3 +3 +3 +3 +2 +3 
+3 +1 +1 +1 +1 +1 +1 +1 +2 +2 +1 +2 +2 +2 +1 +3 +1 +1 +1 +2 +1 +1 +1 +3 +3 +1 +1 +1 +1 +1 +1 +3 +1 +1 +2 +1 +2 +3 +1 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +3 +1 +1 +3 +1 +3 +3 +1 +3 +1 +3 +1 +1 +3 +1 +2 +1 +2 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +3 +1 +1 +3 +2 +3 +1 +1 +2 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +3 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +3 +2 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +3 +2 +1 +2 +1 +1 +2 +2 +3 +1 +1 +1 +3 +1 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +1 +3 +2 +2 +2 +3 +1 +3 +3 +3 +1 +1 +1 +1 +2 +1 +1 +1 +3 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +1 +2 +1 +1 +2 +1 +1 +1 +2 +2 +2 +1 +1 +1 +2 +2 +1 +1 +3 +1 +1 +3 +1 +1 +3 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +2 +1 +3 +2 +2 +1 +1 +1 +3 +1 +1 +3 +1 +1 +3 +1 +1 +1 +1 +1 +3 +2 +1 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +2 +1 +2 +3 +1 +2 +3 +1 +2 +1 +2 +1 +1 +3 +1 +3 +1 +1 +1 +1 +2 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +3 +3 +3 +1 +2 +1 +1 +1 +1 +2 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +3 +1 +3 +1 +1 +1 +2 +3 +2 +3 +1 +2 +2 +1 +1 +1 +1 +1 +1 +1 +1 +3 +3 +3 +3 +1 +2 +1 +3 +1 +1 +2 +1 +1 +2 +3 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +3 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +2 +1 +1 +2 +1 +1 +3 +2 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +2 +1 +1 +1 +1 +1 +2 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +2 +3 +1 +1 +1 +3 +1 +1 +1 +1 +1 +2 +1 +1 +2 +2 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +2 +2 +1 +1 +1 +3 +3 +1 +3 +1 +2 +1 +3 +1 +2 +1 +1 +2 +1 +2 +2 +3 +3 +1 +1 +1 +1 +1 +1 +2 +3 +1 +3 +1 +3 +3 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +3 +2 +2 +3 +1 +1 +3 +1 +1 +3 +3 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +3 +1 +2 +3 +1 +2 +3 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +1 +3 +2 +3 +2 +3 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +2 +3 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +3 +1 +1 +1 +2 +1 +1 +2 +1 +1 +3 +3 +1 +1 +1 +1 +1 +3 +1 +2 +2 +1 +3 +3 +2 +2 +3 +1 +1 +2 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +2 +3 +3 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +2 +2 +1 +1 +2 +1 +1 +1 +1 +2 +1 +3 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +1 +2 +3 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +2 +1 +3 +1 +1 +1 +3 +1 +1 +1 +1 +3 +2 +1 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +2 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +3 +2 +1 +1 +1 +1 +3 +2 +1 +3 +1 +1 +1 +1 +1 +3 +2 +1 +2 +1 +1 +1 +1 +3 +1 +3 +1 +1 +1 +1 +2 +1 +1 +2 +1 +3 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +2 +1 +1 +3 +1 +1 +3 +2 +1 +3 +1 +3 +1 +1 +3 +1 +1 +1 +1 +2 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +3 +3 +1 +1 +2 +1 +1 +2 +1 +1 +3 +1 +2 +3 +1 +1 +3 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +3 +1 +1 +1 +1 +3 +1 +2 +2 +1 +1 +2 +3 +3 +1 +1 +1 +1 +3 +1 +2 +1 +1 +3 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +1 +3 +1 +1 +1 +1 +3 +3 +1 +1 +1 +1 +3 +1 +1 +3 +2 +3 +1 +1 +1 +1 +1 +1 +1 +3 +1 +3 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +3 +1 +1 +1 +1 +3 +1 +1 +3 +1 +2 +1 +3 +1 +1 +1 +3 +1 +2 +1 +3 +1 +1 +1 +1 +1 +1 +1 +2 +3 +3 +1 +3 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +3 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +3 +1 +1 +1 +2 +3 +1 +1 +3 +1 +3 +1 
+1 +1 +3 +3 +3 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +3 +2 +2 +3 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +1 +2 +1 +2 +1 +3 +1 +1 +1 +3 +1 +1 +2 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +3 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +3 +1 +3 +1 +2 +1 +1 +3 +1 +1 +2 +2 +3 +1 +3 +2 +1 +1 +1 +1 +3 +2 +1 +1 +1 +1 +1 +2 +1 +1 +2 +1 +3 +3 +3 +1 +1 +1 +1 +1 +3 +1 +1 +3 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +3 +3 +1 +1 +1 +2 +1 +2 +2 +1 +1 +1 +3 +1 +1 +1 +1 +2 +3 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +1 +3 +1 +1 +3 +2 +1 +1 +3 +1 +3 +1 +1 +1 +3 +2 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +1 +1 +1 +1 +3 +2 +1 +1 +1 +2 +3 +1 +3 +3 +1 +1 +1 +3 +2 +3 +3 +1 +1 +1 +1 +3 +1 +1 +2 +1 +1 +1 +1 +1 +2 +3 +1 +1 +2 +1 +3 +1 +3 +3 +1 +1 +2 +2 +1 +1 +3 +3 +1 +2 +2 +2 +3 +1 +3 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 +2 +1 +2 +1 +1 +3 +1 +3 +2 +3 diff --git a/inst/dev/datasets/cec/mouse_1_classic/dimension.txt b/inst/dev/datasets/cec/mouse_1_classic/dimension.txt new file mode 100644 index 00000000..d8263ee9 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/dimension.txt @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_classic/energy.txt b/inst/dev/datasets/cec/mouse_1_classic/energy.txt new file mode 100644 index 00000000..609ee687 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/energy.txt @@ -0,0 +1 @@ +3.244439 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_classic/input.txt b/inst/dev/datasets/cec/mouse_1_classic/input.txt new file mode 100644 index 00000000..f66c05e4 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/input.txt @@ -0,0 +1,3000 @@ +1.61209664866328 -0.413233458995819 +1.7419871352613 0.422129414975643 +2.60966323688626 2.46355983987451 +0.568149335682392 0.856650270521641 +0.851572871208191 -0.572510071098804 +-1.7647741548717 2.44094736874104 +0.0541942827403545 1.43476314470172 +-0.930218894034624 0.573868732899427 +-1.6590390317142 -0.3955920599401 +1.34443809464574 2.4018629565835 +-1.95066077634692 1.31018867716193 +-1.99102317169309 2.0375639796257 +-1.87702622264624 0.490582834929228 +1.18097758665681 2.49035001918674 +1.75239836424589 1.01476220041513 +-1.3817829452455 -0.516242548823357 +2.34617390856147 1.08066613599658 +-0.212645802646875 -0.185363076627254 +-1.002137940377 2.07395137101412 +1.96139129623771 1.15754506364465 +-1.17359426617622 -0.0368270874023438 +1.59891119971871 -1.13823710381985 +-1.92849291115999 2.71003712341189 +1.03600108250976 -1.59709620848298 +1.58836380019784 -0.717748995870352 +-2.6994837410748 1.43561523407698 +0.704492211341858 -1.67905734106898 +1.16078396886587 2.09976300224662 +0.32393553853035 0.34012009203434 +-1.48058445751667 -0.862582374364138 +0.527839832007885 1.35229524224997 +-2.25163902342319 2.51089239120483 +2.7304120734334 2.23758437484503 +-2.0815456584096 2.57961105182767 +2.00750983133912 1.18487352877855 +-0.0966376960277557 0.740540828555822 +1.93578931689262 -0.392697624862194 +-0.0973851718008518 -1.6135998480022 +1.50237447395921 -0.545705925673246 +-0.0956599004566669 0.0203249007463455 +0.606842920184135 0.921810775995255 +-1.2021614536643 1.63980568572879 +-2.47400256618857 2.00858150050044 +0.700028248131275 0.372448820620775 +-2.44264267757535 2.41181446239352 +-1.99176017567515 1.7488059848547 +0.147386211901903 -0.743320502340794 +0.37871852144599 -0.961705703288317 +-0.478621114045382 1.40059468522668 
+-0.0425610728561878 -0.853261288255453 +2.53069310635328 1.59227794408798 +-1.87901111319661 -0.358152363449335 +0.954459019005299 1.39409958571196 +0.324125435203314 -1.40645006299019 +-0.395102594047785 -0.578349638730288 +0.714126072824001 1.09385222941637 +2.26894120872021 2.86285768821836 +-0.525708746165037 1.92314795404673 +-0.439350225031376 -1.20656536519527 +1.85878024995327 0.408548388630152 +-1.34356329217553 -1.16515595465899 +1.88550673425198 1.81114589050412 +2.05061941593885 1.17919966578484 +0.859156642109156 -1.40385042130947 +1.74830450490117 -0.508347071707249 +-1.49666209518909 -1.10139701142907 +1.79499476775527 0.419189963489771 +0.00937820971012115 1.21121746301651 +-1.62085117399693 -0.471252281218767 +1.84430212154984 -0.222130320966244 +0.313748493790627 0.978976618498564 +2.5580178797245 2.22202425822616 +0.149811711162329 1.75097593292594 +0.100270200520754 1.13340963423252 +1.23176250979304 -0.748868733644485 +-0.873373385518789 0.285578969866037 +0.214500464498997 1.89648038521409 +1.5453750230372 1.45198041573167 +-1.32176746800542 1.87009589001536 +-0.216902684420347 1.42689475789666 +2.03494207933545 2.21144592016935 +-0.135794829577208 -1.17052250355482 +1.49550383910537 -0.500416904687881 +-0.989499978721142 -0.840314164757729 +1.12359218671918 1.31851376593113 +-1.38978365808725 0.76328818872571 +2.23130672425032 2.63739766553044 +-1.8116545714438 1.19852881133556 +0.630826715379953 1.13814905285835 +1.69685072079301 2.77633919939399 +-0.464271951466799 -0.822133090347052 +0.958454590290785 -0.427048087120056 +1.24529281631112 0.38800411298871 +2.15577581897378 2.38636680319905 +1.31874823570251 -0.872197724878788 +1.61186712607741 -0.760608464479446 +1.50582986697555 -0.649233177304268 +-1.16123018413782 0.353783454746008 +2.92900170758367 1.94925379753113 +-0.167192131280899 0.43928287178278 +0.889743082225323 1.00621205568314 +0.210233103483915 -0.997596062719822 +1.74519320204854 -0.487294547259808 +-0.679500922560692 1.38434460759163 +-0.146491166204214 0.196907259523869 +1.36268999800086 -0.405691497027874 +-0.753944084048271 -0.0923365391790867 +-1.32451192289591 1.50311958044767 +-1.73169532418251 -0.535160902887583 +0.183907847851515 -1.82662163302302 +1.48520020022988 0.683072898536921 +1.81297934800386 -0.825113583356142 +2.80013621598482 2.39241858571768 +0.851973999291658 1.1710419356823 +2.88015890866518 1.95329306647182 +1.37963550537825 1.72378801181912 +-1.74487737566233 -0.021989319473505 +0.923321183770895 -0.505416136234999 +0.35810911282897 0.171648122370243 +-0.611732892692089 0.628997180610895 +-2.66885285079479 2.43093906342983 +-0.77232750505209 1.796799544245 +1.30631361156702 -0.335120588541031 +0.499342810362577 -1.62093053385615 +0.443861253559589 0.0536024756729603 +-0.770540583878756 1.04491452872753 +0.208602167665958 1.19714096188545 +2.69211648032069 1.44047920405865 +1.42747324332595 -0.774205319583416 +-0.802357636392117 -0.888977598398924 +-0.551691222935915 0.127152539789677 +-0.388330698013306 -1.09207228943706 +2.16791117191315 2.24981886520982 +0.193644858896732 0.806783203035593 +1.35509423911572 -0.441257961094379 +1.19813763350248 0.00904372334480286 +1.21165931224823 1.86079402267933 +1.26811403036118 1.40932361409068 +-1.9056026302278 1.05506280437112 +1.09377502650023 2.38781935349107 +-1.96020850539207 1.41531054675579 +0.663487669080496 -0.665306996554136 +-1.8456757850945 -0.0194409415125847 +-1.91931500285864 -0.0470988340675831 +-0.637594405561686 -1.69683813303709 +-1.46911622956395 
-0.256256550550461 +-2.82381584122777 1.77615419775248 +-0.724260363727808 1.8149929754436 +1.09889047965407 2.37745362147689 +-1.4723867662251 2.72274923697114 +1.9817033149302 0.0610696449875832 +2.43553154170513 1.55298114940524 +-0.692302133888006 1.19863210618496 +-0.206275515258312 0.0856613777577877 +-0.225796446204185 0.892047345638275 +0.942115299403667 -1.5204333178699 +-1.93550775572658 -0.353068083524704 +0.654838521033525 -0.0228586420416832 +0.718102071434259 1.20882242545485 +0.749999020248652 0.618096154183149 +-1.3312373906374 2.67259626463056 +-0.58808583766222 -1.80532194674015 +1.285110745579 2.61369458585978 +-2.30057621747255 1.30949757993221 +-0.214167837053537 0.125181913375854 +1.41330854967237 -0.164014380425215 +2.61858379468322 2.53823513165116 +-1.82938589528203 2.5589129589498 +-2.8707655146718 1.54337161034346 +1.16227375343442 0.928168889135122 +-1.06591911241412 2.03961246833205 +1.6409085765481 1.83075737208128 +-0.107716578990221 -0.551954045891762 +-1.59457765892148 1.63932278379798 +-2.16795053705573 2.07921721413732 +-1.4361627548933 1.40881815180182 +-0.750233829021454 1.23842242732644 +1.8784948810935 1.97354388609529 +2.69149800017476 1.89659919962287 +0.779429562389851 -0.347004380077124 +-1.70491071417928 -1.00703772902489 +-0.83432724326849 -0.365319933742285 +-1.71521957591176 -0.967166464775801 +2.17667028680444 1.2198950573802 +0.870618931949139 1.54036268219352 +0.251090154051781 -0.132912382483482 +1.5877863317728 0.678796026855707 +0.785636354237795 -0.224852934479713 +1.36263806372881 -1.2199660949409 +1.36459283530712 2.50574555248022 +-1.01252736151218 1.64294324070215 +1.14787768572569 -0.823890078812838 +2.596291590482 1.4821464009583 +-0.146839492022991 1.85857572779059 +-0.957799948751926 1.01166198775172 +0.84897394105792 -1.58480857685208 +0.529773037880659 -1.73885920271277 +1.59365494921803 -0.93388469144702 +-2.55761077255011 2.52492552250624 +-1.00085066631436 1.2092454507947 +0.742211189121008 -1.14398978278041 +-0.328856363892555 -1.55320810899138 +-1.02509764581919 1.52784418314695 +-2.05574489012361 1.35704055801034 +0.81254443898797 -0.0315317176282406 +-0.257675908505917 -0.178346656262875 +-0.67271126434207 0.731910966336727 +1.37309474125504 1.71898373961449 +-0.906342931091785 -0.848964512348175 +-1.0287752635777 -1.34678086638451 +-2.25897191092372 1.44886104017496 +-0.598325457423925 -0.0408293828368187 +2.32446739077568 1.48157031461596 +-1.4871674887836 -0.864731453359127 +0.442404374480247 0.84542427957058 +-2.90436279401183 2.09229874610901 +0.304890420287848 -1.81511974334717 +1.88274436071515 2.18269338458776 +-0.920551784336567 -0.365165416151285 +1.14236346632242 -1.29492803290486 +1.04628176987171 -0.545630246400833 +0.399722006171942 1.84942538663745 +-1.00721666589379 0.163407508283854 +1.40371734276414 -0.268044795840979 +-0.414731193333864 1.27007945254445 +-1.54506851732731 0.555353846400976 +-0.104042284190655 1.70410278439522 +-1.30641648173332 2.18044982478023 +1.24231900274754 -0.37263298407197 +-0.231576219201088 -0.893185991793871 +-0.78636497631669 1.51184230297804 +-0.231641631573439 1.19101314991713 +-0.936298809945583 -0.838305097073317 +0.78686735406518 0.062991090118885 +0.180524613708258 -0.651404362171888 +-2.19469415023923 1.09565394371748 +0.788988001644611 0.933427304029465 +0.291592765599489 0.749692879617214 +0.167921397835016 0.676627937704325 +-0.612701941281557 1.50024525821209 +-0.134262174367905 -0.196316670626402 +0.133057925850153 -0.417828556150198 +1.250812407583 
0.748687732964754 +1.91872930154204 2.65982663631439 +1.78028824180365 -0.673218831419945 +0.571906767785549 -0.541913568973541 +1.77438765764236 1.98131423071027 +0.662845615297556 1.18991031870246 +-0.909465260803699 1.0986944437027 +-1.13336053863168 -1.49439633265138 +1.5435923486948 2.85983864963055 +0.0603081919252872 0.75813976302743 +-2.15522218123078 2.13374704867601 +-0.27484630048275 1.24552789330482 +-1.58977943658829 -0.0374957323074341 +2.77542039752007 1.71519975736737 +1.64606515318155 2.12922851368785 +0.886503338813782 -1.78284951299429 +2.72952016070485 2.00538262724876 +0.843258656561375 -1.2337305136025 +-2.65595661103725 2.33931179717183 +1.48117453232408 1.22620160877705 +1.72258114442229 1.51937655359507 +1.68386142700911 -1.05569622293115 +1.46284241974354 -0.705050360411406 +-0.588659510016441 1.4270611256361 +1.96614512428641 1.98830921202898 +0.734626423567533 0.402017414569855 +0.915748301893473 -0.504114560782909 +1.42900901660323 1.66078344359994 +2.25488945096731 2.90158635750413 +0.955485995858908 -1.75332788750529 +-2.88088296353817 2.26876648887992 +2.13912281394005 2.52278220653534 +0.287582330405712 0.00384973362088203 +-0.461033016443253 -0.432767115533352 +2.81660261377692 2.27390728145838 +1.49667909368873 -0.48365531489253 +-2.29053501039743 2.48293719813228 +-1.11716188490391 0.289610158652067 +0.333971489220858 0.0444956198334694 +-1.3790140375495 -0.727075684815645 +-0.796113330870867 0.967301711440086 +0.366976842284203 -1.64739779010415 +-1.28612922504544 -1.14733279496431 +0.37472839653492 -0.901894651353359 +-0.975736565887928 1.17933520674706 +0.605364181101322 -1.61345959454775 +-2.73813807219267 1.7968438602984 +1.57255982980132 1.24518446996808 +1.779820676893 1.28376130759716 +0.285859815776348 0.691839635372162 +-0.910921044647694 -0.594001829624176 +-0.823106937110424 -1.05418333038688 +0.104728970676661 1.74921382591128 +-2.38141652569175 1.12706491351128 +-0.83786728233099 -0.175873752683401 +2.11039216071367 1.83155750855803 +-1.99030840024352 1.51974556222558 +2.68357738479972 2.48060549423099 +-1.1845739595592 0.326227597892284 +-1.7077529579401 2.62458549067378 +-1.23910231888294 1.22679214179516 +-0.124209456145763 -0.468802977353334 +-1.1875561773777 1.26572915911674 +-1.60731089115143 -0.422453884035349 +-1.14812042564154 -1.17296344786882 +-0.884335368871689 -1.00313790887594 +0.723984904587269 0.207578588277102 +-0.0211784578859806 0.759385578334332 +-2.3401742130518 2.87697842344642 +0.482478722929955 0.578970745205879 +-2.80467076599598 2.5091815367341 +-0.152245610952377 -1.98909625038505 +-0.976488165557384 0.765861105173826 +0.358756449073553 1.77729746699333 +1.40397672355175 -1.33897558227181 +-2.73336103186011 1.93623175472021 +1.33247448503971 -0.801424868404865 +-0.771697003394365 -0.867804300040007 +-1.1027612388134 1.39652150124311 +-0.209395322948694 -1.08475765585899 +-1.9979960732162 1.9581238925457 +2.07440412417054 1.3293282687664 +-1.93831306695938 -0.0201470404863358 +1.4558743275702 0.454823911190033 +0.999020967632532 -0.583084352314472 +-1.10364705696702 -0.968060072511435 +-2.12873597070575 2.42020189762115 +-0.806119713932276 -0.558744192123413 +-1.40649508312345 2.07905009388924 +-1.57709585130215 1.44691667705774 +-1.69501673802733 0.722719877958298 +1.12624054402113 0.0203857608139515 +-0.106229163706303 1.46423427760601 +1.66349943727255 0.0891099311411381 +1.0056647323072 -0.449528533965349 +1.17581545561552 2.35557828843594 +2.85731463879347 1.61511135846376 +0.0460674688220024 0.993230998516083 
+-2.00231520831585 1.78404529392719 +-0.857668086886406 -0.603680722415447 +0.212190877646208 -0.0847933255136013 +2.08787521347404 2.40160214528441 +-1.48174329474568 -0.511398892849684 +-1.23005585744977 -1.15711344406009 +1.60670629888773 0.144752860069275 +0.611652843654156 1.09040654450655 +0.0687106512486935 1.57068181037903 +0.973474074155092 1.72988789156079 +2.52547753229737 2.24037718400359 +2.12066468223929 2.70268177241087 +-0.0484584085643291 1.39731380343437 +-1.61877960339189 2.90682883933187 +-1.7491229660809 -0.778733663260937 +0.774491168558598 1.33957181870937 +0.114488948136568 1.42440451681614 +-0.76437946408987 -1.79316079989076 +-2.02736221253872 2.26537892222404 +-1.36571399122477 1.04741927608848 +2.26948389038444 2.52961926162243 +-0.806051567196846 1.47987924143672 +-1.54564606770873 0.673107825219631 +1.90441290289164 -0.549355231225491 +-0.323343329131603 1.58480306342244 +0.51525042206049 0.346732381731272 +2.17656968906522 1.03988980129361 +-1.04702944308519 0.146956086158752 +-2.84862881526351 1.64181040972471 +-1.1561822630465 -1.44025735929608 +0.384719599038363 -1.64739570766687 +-1.9167810715735 1.0153863504529 +-0.764548536390066 1.81304601207376 +-0.0982047691941261 1.813994217664 +2.52984330430627 1.34231220185757 +2.89486844837666 1.95676428452134 +2.88041764870286 1.5644040517509 +0.254188776016235 1.51305732503533 +1.2805294804275 0.120650105178356 +-1.69608326628804 0.0137143023312092 +1.42758256942034 1.96696586161852 +1.33042541518807 -1.17135084047914 +-1.8801090605557 0.5263367369771 +1.13387015461922 0.820474114269018 +-1.07915004715323 1.01671554148197 +1.48597346618772 1.63048772886395 +1.50408792868257 1.54940662905574 +-2.4542132653296 1.69421143084764 +1.38832054287195 1.14387844875455 +1.75520042330027 2.04135481268167 +-1.17672529816628 1.87263363227248 +-0.01735720038414 -0.132140222936869 +-1.61478655412793 1.87356888130307 +-0.309962596744299 0.660954315215349 +2.20934700965881 2.22116094082594 +-2.46272791922092 2.46215682476759 +1.80619774758816 2.01026545464993 +-1.43536208942533 0.606022253632545 +-1.73742507025599 -0.508309822529554 +-2.0013825558126 2.3458355255425 +0.533945962786674 -0.452988084405661 +1.28323761001229 -0.608238104730844 +-0.891486573964357 1.12702103704214 +1.51443880796432 0.38937396556139 +-0.929268095642328 -0.225581336766481 +1.33460436016321 1.53977439180017 +-1.91680308431387 1.21642768755555 +-2.64733371511102 2.6652738712728 +0.0621351078152657 1.98383947089314 +-1.58726055920124 0.241737522184849 +-0.011457908898592 1.35303677991033 +1.57809987291694 2.18806220591068 +-1.54139747470617 -1.25315822288394 +1.13442904874682 -0.272605936974287 +-0.516586482524872 -1.81004797667265 +-0.442781109362841 -0.075520571321249 +-1.17415972426534 -1.22637978568673 +0.491233013570309 0.812948312610388 +-0.267092142254114 -0.408651396632195 +2.63292656093836 1.59315424412489 +-1.10870075598359 -1.34610893949866 +-2.58903631940484 1.85086574405432 +-0.0182708576321602 1.2258055023849 +-1.46989291906357 -0.482244189828634 +-0.106721688061953 0.604912262409925 +-1.18913446739316 -0.42220875993371 +1.2769967019558 1.1977108977735 +0.362223550677299 -1.7522339001298 +0.984295953065157 -0.0474527031183243 +-1.33436021208763 0.215641874819994 +-0.699580814689398 0.362101767212152 +2.78464812785387 1.91815891861916 +0.228782624006271 -1.72433909401298 +-1.81500281766057 0.806421231478453 +-1.07556383311749 -0.284285016357899 +-1.56682952865958 0.280237857252359 +-0.977108031511307 0.270161170512438 +-0.167716830968857 
-0.971038773655891 +-2.34170935675502 1.65247280150652 +-1.40580565854907 1.23170851171017 +-0.651007331907749 1.09626273810863 +0.940711803734303 -1.69152277708054 +-2.66293790191412 1.69267027080059 +-1.19879751652479 -0.718646947294474 +2.24167885631323 2.77986229583621 +-1.01541655138135 1.47018028050661 +1.41071298718452 -0.0510727576911449 +0.219346575438976 -0.861910942941904 +0.659375287592411 -0.0785036236047745 +-0.280180782079697 -1.42857759445906 +2.24208739027381 1.31328578293324 +-0.405220914632082 0.696090012788773 +0.213812310248613 -1.73564483597875 +-0.593460846692324 0.780115108937025 +1.5017857812345 -0.528658013790846 +-1.27037632837892 -0.10525755956769 +-1.14273864030838 0.346882212907076 +-1.18504957482219 1.27822986245155 +-0.306376982480288 -1.81502694636583 +1.37392682209611 -1.45243813470006 +0.554716359823942 -1.56558381393552 +-1.13874449208379 1.06046610325575 +-1.20791961997747 0.854293014854193 +1.32906229794025 -0.489615269005299 +0.448300585150719 1.24714457988739 +2.73381044343114 2.01813814789057 +-0.354762729257345 -0.289955407381058 +-1.49942932277918 2.75285452231765 +0.100593943148851 -0.95323470979929 +0.176947806030512 -1.20076877623796 +0.956967562437057 -1.39249560981989 +1.9457249417901 1.26027692481875 +-0.750552903860807 -1.24275388196111 +-2.11385520547628 1.77150790393353 +-1.16288026049733 1.22170064598322 +1.22716143727303 -0.606911916285753 +0.109755299985409 0.68184657394886 +-1.73263110220432 1.09293292835355 +1.94530440494418 1.91540180891752 +-1.15004150569439 0.674392331391573 +-1.92224616184831 0.444126687943935 +-1.32409057021141 1.72198455408216 +0.932368628680706 -0.548008780926466 +-1.30022368207574 -1.21816616505384 +2.75695678591728 1.35465694963932 +-0.802638668566942 -1.76418712735176 +0.720328185707331 -0.414885513484478 +-1.1584729552269 0.088740948587656 +1.20938759297132 -1.57025018334389 +-1.53087161853909 0.479916967451572 +-1.77453901991248 0.836235910654068 +1.49006712809205 -1.01675363630056 +-1.04757906496525 2.28137902915478 +-2.88343966752291 1.70166708528996 +-1.67202346399426 -0.508517280220985 +0.799075525254011 -1.06793490797281 +-1.79523884132504 -0.411252174526453 +-0.275138586759567 -0.890490628778934 +1.87303712591529 1.03998792916536 +-1.81883070245385 -0.770597707480192 +2.04085912555456 2.10473658889532 +1.37932920083404 2.77618318796158 +0.159704800695181 -1.78148379176855 +-2.47859755158424 1.89896140992641 +-0.369118455797434 -1.0831924341619 +1.37789061665535 1.15662802383304 +-0.36779011413455 0.391380451619625 +0.653758212924004 0.0292863547801971 +1.29078165814281 2.69200912490487 +1.03547664731741 0.310575537383556 +-1.34879859164357 -0.45894318073988 +1.61296778172255 1.23217060789466 +0.561416443437338 1.21742410957813 +2.06879713386297 1.87893046066165 +0.170414786785841 0.566636126488447 +-0.769214432686567 -0.0738566182553768 +-0.52552243322134 -0.54743380472064 +1.51468607410789 -0.236435241997242 +-1.23681088164449 -0.214474212378263 +0.700137678533792 -0.137890662997961 +0.232495915144682 0.738477788865566 +0.243623040616512 -0.304031766951084 +2.39675883203745 1.46972893923521 +0.632519576698542 1.52202996239066 +-0.649610586464405 -0.916152786463499 +0.470991976559162 0.998199924826622 +0.590636864304543 -1.36877365410328 +0.463722322136164 0.250982224941254 +0.825104437768459 0.333102226257324 +1.39735870435834 1.04174628853798 +-0.206214763224125 -0.378548834472895 +1.08103916794062 0.976098999381065 +1.48400101065636 -0.910066924989223 +-1.85118143260479 0.708690382540226 
+-0.565950501710176 0.625560741871595 +-0.823683697730303 0.278628386557102 +-0.873309917747974 -0.754590302705765 +1.55529597401619 1.33674847707152 +-1.74192931875587 2.80977919325233 +-0.714297119528055 0.665078386664391 +1.05709943175316 0.0479051508009434 +-1.71932093426585 0.630604956299067 +1.77689529210329 -0.500825833529234 +0.582010496407747 -1.42905256524682 +-0.319995757192373 -0.548368345946074 +0.396011661738157 -0.377191983163357 +-0.222157120704651 -0.300181284546852 +-1.48089446872473 -1.0813753195107 +0.326244611293077 -0.64968279004097 +-1.50954092666507 -0.486991383135319 +0.817699555307627 -0.498494185507298 +-2.14065649360418 1.60626352950931 +-0.477922696620226 -1.18297161906958 +-1.74971666187048 1.36385659128428 +0.86207827180624 0.186773005872965 +-2.88186955824494 2.03486770018935 +0.0584888346493244 1.06556739658117 +1.41413897648454 0.445815607905388 +1.0246768258512 0.958350408822298 +0.59256686642766 1.41820696741343 +1.44288720563054 -0.0166402049362659 +-0.652561113238335 -0.508260790258646 +0.403811205178499 -0.0542031452059746 +1.86661488190293 1.44520040974021 +1.59227151423693 0.726458314806223 +-1.72743259742856 1.07654907926917 +-0.348737996071577 -1.55616420507431 +1.8226753436029 1.34290179610252 +1.23731549456716 0.0249195769429207 +-0.994962804019451 1.29734811931849 +2.77047290280461 1.60680848732591 +1.71377348527312 2.90838585048914 +0.636910039931536 -1.01009327545762 +0.645866874605417 0.304849199950695 +-0.389709025621414 1.20820839703083 +2.16575931757689 2.53462814539671 +-0.0697886198759079 0.279298566281796 +0.824261005967855 -0.951204121112823 +0.23216649889946 0.1627405770123 +0.151991598308086 -1.73444369062781 +0.889385126531124 -0.474071178585291 +1.78851625695825 2.5013146288693 +2.04314283281565 2.16521344333887 +-0.203443065285683 -1.07459056004882 +-0.344551514834166 -0.139745447784662 +1.34165865182877 1.68604994192719 +0.344412036240101 1.03601999953389 +-0.962462574243546 -0.546079341322184 +-1.83921800926328 1.81084182113409 +0.581896536052227 0.470592677593231 +0.60276248306036 -0.932877015322447 +-1.72010714560747 2.00662796571851 +-0.570069700479507 0.679780177772045 +-1.98096702247858 1.07481580972672 +1.28311189264059 -1.0176078081131 +0.918729070574045 1.04906477034092 +0.190583743155003 -1.30005686357617 +-0.614699587225914 1.60986620932817 +-2.2866636030376 1.55763847380877 +-1.56808182969689 1.50426350906491 +-0.336312301456928 0.476349886506796 +1.86089237779379 0.520292226225138 +-1.31936386972666 0.567550659179688 +1.71694516018033 -0.374033767729998 +0.740244954824448 -0.406750220805407 +-2.63814443722367 2.22630811482668 +-2.56965424865484 1.6463201828301 +1.98521029949188 1.14476111531258 +1.17944970726967 1.0786580555141 +1.61847940087318 1.3662930727005 +2.58270037919283 2.57059525325894 +1.42869425937533 -0.95961806178093 +-2.34599041193724 1.43547162041068 +-1.20825568214059 0.941836193203926 +-2.40933400392532 1.48687404766679 +0.642058733850718 0.510078497231007 +-0.0856090001761913 1.94624486193061 +0.538059320300817 -0.123834766447544 +0.459427136927843 1.70859317854047 +-2.72991582006216 2.2203439809382 +1.37347070127726 -1.18482312187552 +0.860227782279253 -1.33064590767026 +0.197903331369162 -0.0127020329236984 +-1.93055010214448 2.0349305793643 +-0.895426064729691 0.534266371279955 +0.237071663141251 0.909752245992422 +-1.9636841788888 0.189619041979313 +-2.01065504550934 1.4920190423727 +-0.138917092233896 -1.3262345790863 +0.345381032675505 -1.70226432010531 +-0.423854406923056 1.59277452155948 
+-1.64821251481771 0.625669792294502 +-1.65669286996126 -0.696639779955149 +-2.48839632049203 1.18793753162026 +1.14425402134657 1.25053752958775 +0.255539800971746 -1.2057641223073 +-1.74013828858733 0.229272618889809 +-1.485151540488 1.66348664462566 +0.213856045156717 0.273590631783009 +1.97715199366212 0.0167334116995335 +-1.07304240763187 -1.277029607445 +-1.69886174798012 1.29075204208493 +-0.319024320691824 -0.261004727333784 +-1.72671138867736 1.54124293103814 +-0.0221957750618458 -0.79931902885437 +0.55357288569212 -1.44415978342295 +-0.694351952522993 -0.871393423527479 +1.19013844430447 2.4456170424819 +-2.05635992065072 2.32129456847906 +2.3470307290554 1.26997547596693 +-2.90147399529815 2.30455252528191 +-1.04202116280794 0.757331594824791 +1.42260562255979 1.14209574460983 +-0.409668564796448 1.17644890770316 +-1.19820533320308 2.13883887976408 +-0.212645776569843 -0.353138439357281 +0.682033237069845 -1.55658942088485 +1.05738754197955 2.09548281878233 +2.52758413180709 1.96773656830192 +1.27787172421813 -0.307180933654308 +0.886197548359632 0.609604120254517 +-0.575805302709341 1.40276233106852 +1.15352774038911 -0.961484741419554 +1.58926624804735 0.0919618159532547 +0.455788765102625 1.56847905367613 +-2.24899875745177 2.4552356004715 +1.75495069473982 1.57018894329667 +1.34150743484497 -0.25926786288619 +-1.42365857213736 2.20648014545441 +0.0890674293041229 0.000287424772977829 +0.715366076678038 1.67587860301137 +0.835227627307177 -0.737543601542711 +0.453055486083031 1.94252448901534 +-2.04303615912795 2.89809890091419 +0.339856572449207 1.64629746973515 +-0.820706374943256 0.273290891200304 +-2.0191794000566 1.76011408120394 +-2.28815577179193 2.48443923518062 +0.724593859165907 -0.114715207368135 +-2.1275205090642 2.53637117519975 +2.79700756072998 2.46201687306166 +-2.91004760935903 1.96860541030765 +-1.37932300940156 -0.360882338136435 +-0.857186183333397 0.562134925276041 +0.956059772521257 -0.380499728024006 +-0.52379085123539 1.88069343194366 +1.86591764912009 0.454356536269188 +0.115508060902357 -1.36556134000421 +2.7944574393332 1.45868691056967 +1.37775932997465 2.77527860552073 +1.33150111138821 -1.03140095621347 +-0.768940471112728 0.222927004098892 +-2.58208280056715 1.46305055916309 +0.785900685936213 1.08905581384897 +-0.776648823171854 -0.141176775097847 +2.47709801793098 1.94248146191239 +2.17319045215845 1.4354276061058 +1.33227280899882 1.65466177463531 +-0.274067986756563 -1.88904051482677 +0.344518221914768 -1.87160620093346 +-0.497877702116966 1.42005406320095 +-1.51158518716693 0.279772687703371 +0.723672553896904 0.926269438117743 +-2.40180294215679 1.4179679453373 +0.0678855217993259 1.12117820605636 +0.344261880964041 1.92204321548343 +0.477840799838305 -0.234347868710756 +-0.937936961650848 -0.781560368835926 +-1.15391851589084 1.29911101609468 +-1.62856407463551 2.42734021320939 +-1.97161433473229 2.73753204569221 +-1.94153457507491 -0.321690138429403 +-0.508317951112986 -1.30991568043828 +-1.06496314704418 -0.931523997336626 +-2.58007526025176 2.34283326566219 +2.82334679737687 1.71734195947647 +0.529734812676907 -0.455270130187273 +1.50503816083074 -0.918114095926285 +1.56300611793995 -0.211321994662285 +1.28373529016972 -1.48647388070822 +-1.30062136054039 0.0196338482201099 +1.20363237336278 -0.65967320650816 +1.53332705423236 1.49214838072658 +-1.71455581486225 2.47656869888306 +1.22900435328484 1.21752382814884 +-1.61753913387656 -0.880302488803864 +0.165655422955751 1.06339140236378 +-2.29668224230409 2.31376715376973 
+-1.47021249681711 0.225431978702545 +-2.36213478818536 2.09966435283422 +1.05153727903962 2.25530040636659 +1.17786652222276 1.8012652695179 +-0.0622869916260242 -1.89502627030015 +0.729040823876858 -0.710815671831369 +1.69549231976271 1.85620391368866 +2.1523706279695 1.23775978386402 +1.45786710456014 1.52334039285779 +-1.67810813710093 0.790088199079037 +-1.13307494297624 2.13556850701571 +-1.4520391151309 1.99991558119655 +0.310260649770498 -0.698613714426756 +1.48194794356823 0.469821948558092 +0.313009947538376 -1.93044844269753 +1.77928545325994 -0.772850502282381 +-1.0591834411025 0.753962285816669 +-1.74035127833486 2.73568035662174 +-0.765960302203894 1.33227364718914 +1.2311534024775 1.2338029704988 +-0.338072817772627 -0.536280494183302 +-0.0493500158190727 1.77414136007428 +-0.340701516717672 -0.530244264751673 +1.60373703762889 -0.91962556540966 +-1.3951579220593 1.39883862435818 +1.9614544659853 1.89828164875507 +0.924218159168959 -0.756026979535818 +-0.84309745579958 -1.20304860547185 +0.570804331451654 -0.943850375711918 +-0.699526097625494 0.0223709531128407 +-0.875719040632248 0.196104601025581 +-1.72877121344209 2.60649118199944 +2.78019555658102 2.45823834091425 +1.24220585823059 -0.473383236676455 +-2.71729859337211 1.31745962426066 +2.83184543251991 2.27851599827409 +0.270559083670378 0.492461573332548 +-0.907657071948051 1.70739825069904 +-0.167800299823284 -1.7411447763443 +-0.538969729095697 -1.01378022879362 +1.69823522865772 2.74204523488879 +-0.21199531853199 1.68685633316636 +1.57026622071862 0.0360033512115479 +1.50411317870021 2.48971028998494 +0.931985508650541 -0.215817362070084 +-0.883255280554295 -1.49524860456586 +-1.17391914874315 1.4219076372683 +1.15381288900971 1.85416286066175 +-1.45556512847543 1.09430568665266 +-1.25514779984951 0.438324838876724 +0.0715929232537746 1.80805894732475 +-2.52838743850589 2.2293014228344 +-2.71313989534974 1.84131486713886 +-0.968578819185495 0.421952567994595 +-1.2464666441083 0.689135510474443 +0.535265009850264 1.18374351784587 +0.789308063685894 0.976799592375755 +-2.44606406614184 2.8364293128252 +-1.17782397195697 0.937423657625914 +0.0740038752555847 0.734560642391443 +1.77172403410077 0.438765987753868 +0.246375389397144 -0.400510415434837 +2.78136451169848 1.46182327717543 +-0.55787219107151 0.761340711265802 +-1.71743490174413 0.9940505027771 +1.40826803073287 -1.31733487918973 +-0.453054886311293 -1.0992496535182 +-0.300243500620127 -1.28626397624612 +1.24190063402057 -1.14660358428955 +-0.745532270520926 -1.13779936358333 +0.122467417269945 -1.98192391172051 +0.403695303946733 0.761751793324947 +1.85238265618682 1.72350120916963 +0.910418264567852 1.09340703114867 +0.90762647241354 1.2066346667707 +1.04697046801448 1.52703953161836 +-2.90958639979362 1.96746077015996 +-0.474552132189274 1.82256393134594 +-0.461465947329998 -0.0675218999385834 +-0.690599206835032 1.36647133901715 +-0.20082725584507 0.982700020074844 +-0.354946196079254 -1.80944062769413 +2.24835130572319 2.15761810913682 +2.84661301225424 1.75481754168868 +1.53677939996123 2.42455476149917 +1.59954442083836 -0.60473845526576 +-1.56738646328449 2.8124819919467 +0.0202366821467876 1.56865410134196 +0.731437847018242 -0.213801234960556 +-1.88840515911579 -0.252409566193819 +0.50103310868144 -1.43448544293642 +1.69719592481852 -0.57636359706521 +-1.87281879037619 2.46435150131583 +0.198508266359568 -1.58546353131533 +-0.0107548348605633 -1.56412705779076 +0.0670644231140614 -1.7435343042016 +2.21374751999974 2.62516567111015 
+0.185091070830822 0.0558304525911808 +-0.282945096492767 0.064533606171608 +-0.692931637167931 1.23959053307772 +2.44647473469377 1.46035656705499 +0.398416325449944 1.54593525454402 +-1.1274655982852 1.79432262852788 +-1.34709008783102 -0.685161292552948 +-0.162260577082634 -1.72360373288393 +1.20392828434706 0.971559729427099 +-0.952574152499437 0.760222312062979 +-1.51738142594695 0.755897477269173 +-0.0618931315839291 1.54406813159585 +-1.01479284465313 -1.53843135386705 +0.0549571476876736 -1.75545589998364 +-1.12079690396786 -1.4833892993629 +1.22591653466225 2.40811822190881 +-0.794736057519913 -0.6828245036304 +0.760214023292065 -1.50887004286051 +-1.73608003929257 2.00579699501395 +2.05689832940698 2.73117042705417 +1.24496367573738 2.12902756035328 +-1.23987824842334 2.18617404252291 +-2.06657461076975 1.68183764442801 +2.54874340817332 2.25832796096802 +-1.39264031499624 -1.18786812201142 +1.8492073379457 2.8502111621201 +2.47581090778112 2.04724755883217 +-0.39014271274209 -1.2873845435679 +1.36955620348454 0.0219872333109379 +-1.12265603616834 0.875563681125641 +-1.52233500406146 -0.621543228626251 +1.68075265362859 0.630292661488056 +-1.29896451532841 1.73986462131143 +0.593366868793964 -0.411695521324873 +-0.419229831546545 0.839527372270823 +-1.14772886037827 0.251891255378723 +-0.962494451552629 -0.528158757835627 +1.86995922774076 2.46966538950801 +-1.69170490652323 2.24430044367909 +0.616669423878193 1.15330833941698 +0.669159021228552 0.180726017802954 +0.31551568210125 0.61780372262001 +-0.291993021965027 1.94897171109915 +-0.00981470197439194 1.90527640283108 +1.14513040333986 1.13836887478828 +-2.48709348589182 1.72358466684818 +-0.520791474729776 -1.46447251364589 +1.96928956732154 2.06225638464093 +-0.321784723550081 0.0455602891743183 +1.11636908724904 -0.55109853670001 +-0.634184204041958 1.62465716153383 +-0.233535833656788 0.424602206796408 +0.429238602519035 -1.01449808105826 +-2.93475391715765 1.95654537528753 +2.24958636984229 1.16004998236895 +-1.90545721724629 2.44428309798241 +1.84401315823197 1.21068027615547 +0.865958970040083 -1.75781208276749 +1.65166252106428 1.2428173199296 +-0.255253493785858 1.69553644955158 +0.958854775875807 -0.518418971449137 +1.53976755216718 1.84834608063102 +-0.396072089672089 1.53375567495823 +1.14350999519229 1.53722698614001 +1.36081284657121 1.62782852724195 +2.37027900665998 2.75563517957926 +2.79318123310804 2.42714469507337 +1.08130984008312 -1.57426323741674 +1.66437162831426 0.338872872292995 +0.550496183335781 0.0753199905157089 +1.2675904892385 1.16364324837923 +-2.2019915394485 2.6763704046607 +1.13104872033 -1.11603409051895 +-1.24109536781907 1.5648830793798 +-1.22520404681563 0.960967216640711 +-1.2560037150979 0.27281915768981 +1.30608120560646 1.7890408821404 +-0.959182307124138 -1.07047866284847 +1.00773581117392 -0.771922282874584 +-1.85960478708148 2.83260897919536 +-2.36360209435225 1.34929390251637 +-0.21733633056283 1.47324853762984 +-0.326439768075943 -0.872647602111101 +0.244743585586548 -0.28065524622798 +-2.46507734805346 2.08112749457359 +2.12605695426464 1.73076891899109 +2.16134568676353 2.80804978311062 +0.89805331081152 0.643511619418859 +-0.965671237558126 0.855416309088469 +-2.63010683655739 2.35672772675753 +0.268972214311361 -0.947771724313498 +-1.11988061666489 -0.0933411531150341 +-1.90122552216053 1.31674107536674 +-0.737363319844007 -0.862982131540775 +-0.0714111626148224 0.577652707695961 +1.12269151210785 -1.03969479724765 +-1.32399569451809 -1.11271368712187 +-1.00233935192227 
0.63940204679966 +-2.35267616435885 1.08056267723441 +-1.34660366922617 -0.164756774902344 +-2.213443800807 2.70522528886795 +2.05172187462449 2.13695424050093 +-2.07727495953441 1.98884826526046 +0.679170109331608 -0.748801223933697 +1.50360184162855 -1.10220401361585 +-1.6019938737154 0.616350516676903 +-1.33445123583078 1.35242111235857 +1.43040638417006 0.53570456430316 +-1.49109214171767 -0.32837437838316 +2.87727523222566 2.09785425290465 +-1.84911384433508 0.57144907861948 +0.567801412194967 -1.67491998896003 +2.33619615063071 1.95260488986969 +-1.69326510280371 -0.400961689651012 +0.0352791845798492 1.30094088613987 +0.915619798004627 1.09820531308651 +1.25542875379324 -0.0693541169166565 +0.153729632496834 1.95820845663548 +2.94619394093752 1.88178231939673 +-0.401465475559235 -0.198477774858475 +-1.2805356644094 -0.992118544876575 +0.380043469369411 1.72641753032804 +-1.60174831002951 2.46971498802304 +0.969146206974983 -0.166711665689945 +-2.22521609440446 1.45425165444613 +-0.00617775693535805 0.861510127782822 +-1.64743112400174 -0.347371086478233 +0.472916126251221 -1.03337186947465 +1.75715745985508 1.32668318599463 +-0.00799329206347466 1.31603561714292 +-1.31249446049333 2.4116744697094 +0.205580618232489 1.98475814983249 +2.60486225411296 1.80450223758817 +0.425120107829571 -0.818385288119316 +-0.302994932979345 0.219054874032736 +0.264994010329247 -0.806065816432238 +-0.187976624816656 0.720968339592218 +0.137975446879864 0.599507216364145 +-1.2191187851131 -0.824776958674192 +-1.05666132643819 -1.61905192956328 +0.93263790756464 -0.494901079684496 +-1.12306211143732 1.09606756642461 +0.0430957525968552 -1.11822626367211 +1.32492009922862 1.65842120721936 +0.548007015138865 1.76564503088593 +-1.12359651178122 2.37319204211235 +-1.65248123928905 1.9153484813869 +1.67410608381033 0.432959243655205 +-0.985819578170776 1.17214288935065 +-0.146475724875927 -0.80176081135869 +-2.02255187556148 1.97097358852625 +-1.83523605391383 2.83727962896228 +0.665113527327776 -0.850884433835745 +1.52938690781593 2.71250328421593 +-0.817462142556906 0.320795577019453 +-0.891031909734011 0.828735444694757 +-0.0749857053160667 1.31830655783415 +-2.46306101605296 2.79506047815084 +-2.58842600509524 1.48355710506439 +1.7514126971364 0.166018925607204 +1.16146966069937 1.31003978848457 +-1.60346744209528 0.373932734131813 +1.58726412802935 -1.1445500254631 +-1.7870487831533 2.56368425861001 +-0.84052037447691 1.0243161059916 +0.0203528627753258 0.488578177988529 +-1.79684330895543 2.55449936911464 +-1.57163673639297 1.0700161755085 +1.448874887079 1.48665998503566 +1.32865772396326 0.157637108117342 +1.6875743791461 0.0803839489817619 +0.105053503066301 1.92537866160274 +-0.59469997882843 -1.89071990549564 +0.518820710480213 0.146228093653917 +-0.124833695590496 -1.50835406407714 +-0.493662144988775 0.674800984561443 +1.52356450259686 -0.600353762507439 +1.53288056328893 2.29829448089004 +2.01232304424047 2.7532178722322 +0.93597774207592 1.58684292808175 +1.61966079846025 0.142074052244425 +-1.18669037520885 0.307359907776117 +-2.28914580121636 1.61424346268177 +-1.29999284818769 1.61958968639374 +-0.891129270195961 -0.533327776938677 +-0.389567270874977 0.746519170701504 +-0.00670206919312477 -1.62156848981977 +1.24555337429047 -0.864229537546635 +-0.482331618666649 1.45330214127898 +0.230390042066574 1.68793528527021 +0.873127032071352 1.37302258983254 +-2.52372413501143 1.94835159927607 +-0.813874743878841 -1.01869543269277 +0.77679455652833 1.08896959200501 +-1.67504642903805 
-0.106921821832657 +0.291903775185347 -1.36916213855147 +-2.43771054595709 1.8589288033545 +-2.46786682680249 1.65814634785056 +0.530217580497265 0.845372054725885 +0.209880754351616 1.38565022498369 +0.703180726617575 -0.290726020932198 +1.3365698158741 0.239704616367817 +2.36196250095963 1.74106182530522 +-0.411545384675264 1.14503682777286 +0.444980774074793 -1.71623821556568 +0.915343143045902 -0.428050484508276 +0.112749550491571 0.541710581630468 +-0.485721498727798 1.22561280801892 +-0.874411020427942 -1.0399131923914 +1.45576563104987 2.63782278075814 +0.922623075544834 0.821765393018723 +-2.73803209140897 1.39711666107178 +-2.62049547582865 2.64745804667473 +0.3240787088871 0.272916536778212 +1.07997346669436 0.10303695872426 +1.32372396066785 1.68290766701102 +-0.343684229999781 1.22317755222321 +1.51650222018361 -0.309085085988045 +1.95787892490625 2.71882850304246 +-1.31741741672158 0.648004364222288 +-1.54294418916106 1.02323402091861 +-0.0972655303776264 1.04595500975847 +-2.70880780369043 1.97760619595647 +-0.773748695850372 -1.1261550039053 +1.14361879974604 -1.50083089992404 +-0.121587004512548 -1.74034046754241 +-2.54584395885468 2.07104491442442 +-1.51466218754649 2.51972312480211 +1.37327092513442 1.80274728313088 +-1.49017160385847 1.70048684999347 +-0.925149422138929 -0.225555434823036 +1.83723958209157 -0.0696011111140251 +-1.59884200245142 2.0495010279119 +1.29445549845695 0.592102330178022 +0.500161856412888 1.29837522283196 +-0.934836398810148 -1.46802053973079 +-1.48246657848358 2.72564266994596 +-0.350434001535177 0.793269768357277 +0.953104853630066 -1.50339762493968 +-0.381016679108143 0.238457649946213 +1.07906810194254 -0.561694119125605 +0.718729425221682 1.81120902299881 +0.400729015469551 0.188561830669641 +1.63372889906168 2.26255261152983 +-1.24140768125653 2.23170849680901 +1.28359889239073 2.67979443073273 +-0.335758235305548 -0.299137238413095 +1.47356367856264 -0.753591116517782 +-0.489084422588348 -0.207807570695877 +1.17601937428117 0.24180406332016 +1.08277695998549 -1.41824344173074 +-1.76625803858042 2.21744757518172 +0.242004312574863 1.33824835345149 +0.491838119924068 0.884854506701231 +0.0278752706944942 0.958707053214312 +1.41538896039128 0.326420392841101 +0.9345200099051 -0.516160536557436 +-0.415696173906326 -0.0917170792818069 +0.406807392835617 -1.76961397007108 +-1.2439575009048 0.245933681726456 +-1.92994840443134 0.115829989314079 +-0.610255554318428 -1.03889447450638 +2.13806098327041 2.40833294391632 +1.93133029341698 2.41103690117598 +-0.482324920594692 0.168740503489971 +1.18060983717442 -0.231183782219887 +-1.94393123313785 -0.183919917792082 +-0.834807269275188 -0.274258755147457 +1.45048379898071 0.00803879275918007 +-2.58261808007956 2.33977568522096 +2.18003575503826 1.6171492934227 +0.764932740479708 -0.692431770265102 +-0.799969334155321 -1.70418108254671 +1.30217403173447 2.29246623069048 +-0.911499384790659 -0.704033926129341 +-1.05195765197277 -1.23424106836319 +-2.01126936078072 2.93482532724738 +2.62925488874316 2.32739082351327 +-0.145552270114422 -0.441923446953297 +-1.0164133682847 0.998988397419453 +-1.60884764418006 2.38057390227914 +-2.26579861342907 2.58376869186759 +-0.795840989798307 0.705477006733418 +0.519770611077547 -1.03785872831941 +-0.0416020080447197 0.857353866100311 +1.6854933090508 2.1359210498631 +-0.353308372199535 -0.8988425321877 +-0.601121790707111 1.30408434942365 +0.562646083533764 1.02688659727573 +0.389875300228596 -1.68898304179311 +2.05545522272587 1.28387600183487 +-1.49153061956167 
-0.510917570441961 +0.757530558854342 0.15566748008132 +1.29348571971059 -0.492735344916582 +1.33286283910275 -1.08027125895023 +1.56072515621781 0.536772079765797 +0.81975232809782 1.69286083802581 +0.991910576820374 1.98686625435948 +0.712091855704784 -0.534358587116003 +-0.967213351279497 -0.341078229248524 +0.925992399454117 -0.431454695761204 +1.51038961112499 2.85181721299887 +-1.69692747667432 0.7123096100986 +-2.71250121667981 1.49679252505302 +-1.01745468750596 1.25012136250734 +1.6739806830883 -0.9831921197474 +-0.537031352519989 -0.305106677114964 +0.0883884839713573 0.0918169021606445 +-0.802525579929352 1.19040975347161 +1.08179592341185 2.15087576210499 +0.0745567381381989 1.38070503994823 +1.14598342776299 -1.18821378424764 +-2.12779810279608 2.01242501661181 +-1.703613165766 2.62982174009085 +-2.18864918500185 2.61913230642676 +1.38494789972901 1.63137398660183 +0.3755475692451 1.68657617643476 +1.06614830344915 0.641513977199793 +-0.270548943430185 -0.327818781137466 +-0.199150718748569 0.827346198260784 +2.22019278630614 1.32400108873844 +1.74296770617366 1.12087880820036 +2.21764127910137 2.43981948494911 +2.2834388948977 1.67238346859813 +0.887990590184927 -0.663364499807358 +1.52407161518931 -0.690158192068338 +-2.76825782284141 2.07751048356295 +0.719515543431044 0.0620783269405365 +-0.243884015828371 0.485258545726538 +2.24361027404666 1.92753091081977 +0.990557473152876 1.25678056851029 +0.985660422593355 0.834152236580849 +-1.09272168576717 -1.07626446709037 +0.551339093595743 1.86239792406559 +-1.7333163395524 2.00862856581807 +2.85851664468646 1.62852170690894 +-0.24569308757782 -1.25303858146071 +1.26167046278715 1.68834135681391 +-2.64750107005239 2.51516953483224 +0.740404844284058 -0.0786540806293488 +0.352980863302946 -1.00623100250959 +-1.56831208989024 -1.16953904554248 +-0.053593672811985 0.805319886654615 +0.448203723877668 -0.0325142554938793 +1.27044494450092 0.690587133169174 +1.48806804791093 -0.967598631978035 +1.17761722952127 1.12233696877956 +0.0101026259362698 -0.559194032102823 +0.709260407835245 1.19985879585147 +-1.31537134200335 0.423020910471678 +-1.38900775834918 1.32479440420866 +2.11414141207933 1.46693830192089 +-1.0238456428051 2.05455801635981 +1.51082843542099 -0.414404325187206 +0.84085601195693 -0.700677160173655 +0.77681752666831 -1.806993894279 +1.98671853169799 2.71468015387654 +-0.105690956115723 -1.2369986474514 +-1.66311693564057 0.172778379172087 +0.757595963776112 -0.0219898037612438 +-2.53993592038751 2.72471570968628 +0.462672788649797 -1.62209073081613 +1.07559450343251 1.15107545629144 +-0.203494012355804 0.0222720801830292 +1.86862663179636 1.24161462113261 +-2.23988113179803 1.01429580524564 +-0.262878891080618 1.47356936335564 +0.299914475530386 0.615165863186121 +1.14961224421859 -1.56140779703856 +1.735401596874 -0.456840042024851 +-0.526582844555378 -0.131077125668526 +-1.55352548509836 0.792419623583555 +-1.32605922222137 1.49822251498699 +-2.10601576417685 1.92525671049953 +-1.42283717542887 -1.3274452239275 +0.657527968287468 1.03831489011645 +-1.8038373067975 0.461807388812304 +-1.06796494871378 1.13320041075349 +-1.69526695087552 1.82598331198096 +0.716089375317097 -0.282606847584248 +-1.5375838726759 1.75207900255919 +1.89382823184133 1.67822148278356 +2.11809450760484 1.785226367414 +-1.70997512713075 -0.609820947051048 +-1.8796372525394 -0.155513018369675 +1.3116729259491 0.568806059658527 +-0.0342989563941956 -0.0822560377418995 +-1.49408828094602 -0.721802540123463 +1.11769405007362 0.895630534738302 
+1.30180840939283 1.5417352616787 +-0.430266618728638 -0.813867043703794 +-0.0718193091452122 0.312009043991566 +0.097915705293417 1.24773037061095 +-2.78287687525153 1.46735619753599 +-0.28649516031146 -0.0648425482213497 +-0.427311889827251 1.41013744473457 +-2.38787797093391 1.44476910680532 +1.98580322042108 1.96508297324181 +1.13532027974725 -0.849503334611654 +2.03529190272093 1.4804508946836 +-1.17037537321448 0.493206936866045 +-0.472225490957499 -0.422915805131197 +0.682085726410151 -0.323150020092726 +-2.31349859014153 2.18026293069124 +-1.28301697596908 1.78996935486794 +-1.7774457000196 -0.911135669797659 +1.78591462224722 -0.161127552390099 +2.46710070967674 2.47988922521472 +-1.40705346688628 0.593788575381041 +-1.63144646584988 -1.03995329514146 +-0.415664453059435 -1.04785621166229 +-1.25881304591894 0.130282506346703 +1.66202214360237 1.15019856765866 +0.729800906032324 1.59387190267444 +2.30366064608097 1.44242330640554 +-2.5605057105422 1.99875178560615 +0.442922230809927 1.21064576134086 +0.32542796432972 1.51928688213229 +-0.900794077664614 -1.68970746174455 +-0.181797165423632 1.4853377379477 +1.49522035196424 0.345912337303162 +2.31555346027017 1.25430102273822 +1.4737935103476 0.483013365417719 +-0.518858268857002 0.301459275186062 +-2.03670025989413 2.12152238190174 +1.42510272562504 2.50975769013166 +-1.14196426421404 2.07707012444735 +0.929580852389336 -1.15562091767788 +2.48210314661264 2.69863298535347 +-1.41762647405267 0.481705617159605 +0.484675079584122 0.0311596058309078 +-2.15877659991384 1.94623680412769 +-0.772989980876446 -1.49250507727265 +1.15163937956095 1.85471335798502 +2.49834866076708 1.55160307511687 +-1.31903513893485 1.95499885454774 +-1.11600451916456 -1.01205840706825 +-2.36147516593337 2.21212596073747 +1.63031893223524 2.51693657040596 +-1.73391414061189 -0.266338337212801 +0.210496738553047 0.4935651011765 +-2.25791275128722 2.5849873572588 +-0.782371692359447 -1.03111720457673 +-1.18238020688295 -0.0239138379693031 +1.13009613007307 1.83260165154934 +2.33159659802914 1.25081048905849 +-1.47402970865369 -1.23795059323311 +2.2532029338181 1.17574341967702 +-1.09911868721247 1.06828270107508 +2.16605700924993 2.77899595350027 +1.28291774913669 0.0744047351181507 +0.216433584690094 0.480195540934801 +-1.40145065635443 0.550289761275053 +-1.93922374397516 -0.408653438091278 +-1.30889908969402 -0.893462814390659 +2.54419414699078 2.38705797865987 +-1.78225905075669 0.1100855730474 +-0.108715374022722 1.32850240543485 +0.279404539614916 1.33085398748517 +0.0978993512690067 -0.0258414521813393 +2.37299030646682 1.77642706781626 +0.708114679902792 1.68077214062214 +2.31399211660028 1.66359844431281 +1.07798966392875 1.01876130700111 +-1.88367050886154 1.81750059127808 +0.664036933332682 -1.4263085462153 +0.675382826477289 -1.49083865433931 +-1.80186311528087 1.586674451828 +0.74841296672821 0.13570074364543 +1.47106958925724 -0.264274034649134 +1.82883140444756 -0.117216046899557 +1.73844607174397 2.01850551739335 +-0.798963502049446 0.727358508855104 +1.63997736945748 -0.435922414064407 +1.16260109469295 0.732356235384941 +-1.89544637873769 0.197346597909927 +1.05227708443999 1.2055250108242 +-2.61280719935894 1.44234056025743 +-0.971887707710266 0.943508755415678 +0.126449126750231 0.054670937359333 +-0.43641597032547 -1.91161715611815 +0.731226228177547 -1.72257101908326 +1.03269800543785 1.14582881331444 +-1.82089724764228 -0.418561842292547 +-0.614222854375839 -0.364959068596363 +-0.914643321186304 1.20726733282208 +-2.53980561718345 
1.50861969217658 +-2.01678333804011 1.95596086978912 +-2.22276938706636 2.93229659646749 +1.4974431656301 1.42290173843503 +-0.0150116607546806 1.75020979344845 +-0.431945152580738 1.94267344102263 +0.466271210461855 -1.11263241991401 +-1.5163800381124 1.36766292899847 +-0.793108206242323 -0.872558798640966 +-1.64170522987843 -0.0279953889548779 +-1.12945876270533 -0.250063914805651 +-2.21334528550506 1.94799560308456 +1.53323804214597 1.64074813947082 +2.13348986208439 2.33491558209062 +-1.57388530671597 2.3816179856658 +-1.71621830016375 -0.182542145252228 +-1.37514034286141 -1.13491536676884 +2.1816654317081 1.44684983417392 +1.40323359146714 2.47480245679617 +-2.42163144424558 1.93257354572415 +0.59268993884325 1.83892074972391 +1.77042577043176 -0.830537274479866 +0.0741852186620235 1.38700285181403 +-1.43312840536237 0.161900017410517 +0.127926293760538 -0.768027286976576 +-0.320092614740133 1.32529952377081 +-0.330006130039692 -1.19323901459575 +-1.29456519335508 1.65464882925153 +-0.840964924544096 1.67264892160892 +0.721481613814831 0.766090594232082 +0.32193186506629 1.69785653427243 +-0.605422578752041 1.82995454221964 +-0.671570736914873 1.61391198635101 +0.0370635464787483 -0.343947395682335 +-1.30966550111771 2.14230608940125 +-1.20618708431721 0.0538079366087914 +2.7583094201982 2.06296315416694 +-1.38536395505071 -0.528982661664486 +-0.78228010982275 -0.414675567299128 +-1.5252906344831 0.571005884557962 +0.719327040016651 -0.234343111515045 +1.45193177461624 -0.0694344714283943 +-1.47526678815484 2.38882211968303 +0.738969147205353 -0.285691805183887 +1.29697913303971 -0.296471245586872 +-0.258411783725023 -1.04281263798475 +1.88614055514336 0.08962382376194 +-0.4118357822299 0.0649249143898487 +1.20938654243946 2.27330233156681 +-1.69697584584355 1.54825845733285 +-1.2206517085433 -1.4293856061995 +1.9412750005722 -0.177515920251608 +-1.4496197104454 2.59260034561157 +-0.524329528212547 1.64032132923603 +0.81450866535306 0.835896242409945 +2.67218970134854 1.606351390481 +0.620094824582338 1.75383407995105 +0.0733196623623371 0.611228551715612 +1.40390717983246 -0.532887879759073 +1.33578765764832 0.433958601206541 +0.864585980772972 0.90500034019351 +2.80021293088794 2.20257074385881 +-0.278874509036541 -1.34362604469061 +0.878897171467543 -1.13240515068173 +1.10032492876053 -1.50175405293703 +0.0362830944359303 -1.17919253185391 +-2.78926679491997 2.34774274751544 +-1.53585304692388 0.980076614767313 +2.72390427812934 2.60082242637873 +0.47517017647624 0.433611653745174 +0.299833510071039 0.314290840178728 +-2.0219319537282 2.9608591273427 +-2.61948146671057 1.72987485677004 +1.54933106526732 -0.499851129949093 +2.82238410785794 2.42687290161848 +1.49040845409036 1.56385259702802 +0.427926454693079 0.703829422593117 +1.17620247974992 -1.09448551386595 +2.56758751347661 2.49453084543347 +2.14229164645076 1.37796101719141 +-1.78534733504057 2.39341812953353 +-1.94506637006998 0.126419771462679 +-2.26199268549681 1.75549422204494 +1.34297505021095 2.73791403323412 +-0.391734018921852 0.582174029201269 +0.060985591262579 1.27320780605078 +1.39932095631957 1.48046927526593 +1.5136228017509 -0.880764462053776 +1.12025159969926 1.80386709049344 +0.990228433161974 1.66189654916525 +-0.783374238759279 0.104833673685789 +0.126892536878586 -1.36936685442924 +0.426468923687935 -0.12374259531498 +0.944379020482302 0.806345943361521 +-0.287133932113647 1.92520950362086 +-1.44791214540601 0.730396661907434 +2.16479841992259 1.90879921615124 +-2.53914725780487 1.42632775753736 
+1.11009929329157 -0.533871129155159 +1.94001681730151 2.96258935332298 +1.31335978582501 2.44427302852273 +1.77992391586304 -0.0300086364150047 +0.844756573438644 1.73728474229574 +-0.166857048869133 1.9801432043314 +0.610263872891665 -0.882506102323532 +-2.38937521353364 2.8504548035562 +-1.27064424753189 0.0774683430790901 +-2.18247332796454 2.54729849845171 +2.2329400293529 1.58304445818067 +-0.934491876512766 -1.75652326270938 +-2.80943645164371 2.36744144931436 +1.0847540050745 1.44449396058917 +-2.65028564631939 1.24637585133314 +1.08447278663516 1.72630621120334 +0.343810733407736 0.212856825441122 +2.83081963285804 2.14261350035667 +-1.929837372154 2.90514193475246 +-0.0464070178568363 0.878266904503107 +-0.127194184809923 1.6854898892343 +-2.11357330530882 2.87049406021833 +1.38634796440601 -0.391260974109173 +-0.127460401505232 1.21677147969604 +-0.0156036429107189 -1.09130707755685 +1.62712199985981 -0.421645026654005 +1.47674088925123 1.51386068016291 +2.35417438298464 2.32163529098034 +-0.0116150118410587 -0.794620949774981 +-0.307622898370028 -1.44338466227055 +-1.03576342016459 1.24820499867201 +1.57169568911195 0.541240524500608 +-1.73478761315346 2.27899528667331 +1.53361180052161 1.45114694163203 +-0.579326320439577 0.908661995083094 +-0.9835423566401 1.03235363587737 +2.13665336370468 2.69047862291336 +1.63789251446724 -0.917652789503336 +1.43423762172461 1.2202184535563 +0.554851125925779 1.41390469297767 +-0.65320848301053 -0.251029964536428 +1.31626556813717 1.02967993542552 +-1.69844851642847 0.853044766932726 +1.9659324772656 -0.345201902091503 +-2.40428134053946 1.3728354498744 +2.5584572404623 1.3877071775496 +0.303306426852942 0.176814738661051 +-2.34402526170015 1.25393917411566 +-1.80699227750301 0.282158695161343 +0.411395467817783 1.76713740825653 +0.605353768914938 -1.73941902071238 +1.34321531653404 1.1232624463737 +-0.0299203917384148 0.236506268382072 +-2.50871723517776 1.58195620030165 +-0.391013480722904 -0.220087386667728 +0.324532236903906 1.28891732543707 +1.49894702807069 0.213011682033539 +-2.71935324370861 1.96808148548007 +1.84430667757988 1.18527668714523 +1.48482031747699 -1.2694385163486 +-0.876954264938831 -1.00909207388759 +1.56161605939269 1.21689524129033 +-0.8716084882617 -0.33866123482585 +2.27512799575925 2.26722835749388 +0.401045873761177 -0.997413162142038 +0.827658258378506 0.557168569415808 +0.340613096952438 0.963561572134495 +-0.41044395416975 -0.340684961527586 +-0.243032779544592 -1.28248865157366 +2.07113479077816 1.2090025767684 +-0.108085427433252 -0.0788223817944527 +0.708779986947775 -0.45957014337182 +-1.24207141250372 -0.0584121011197567 +-1.07045037671924 -1.28725045919418 +-0.865894515067339 1.68840877711773 +1.37976868078113 -1.42127117887139 +-0.363481026142836 0.171184882521629 +-2.17992687225342 2.06267370656133 +-1.83773909136653 2.05474153906107 +0.880719255656004 0.400161068886518 +-0.978168789297342 0.388723645359278 +-1.84748809039593 2.26412176340818 +-2.63423788920045 1.67467019334435 +-1.81288668513298 2.09014561399817 +-2.44771119952202 1.78953400999308 +1.70994303375483 2.87069730088115 +-1.67156808450818 2.90808896720409 +-2.74469723924994 2.27349286526442 +0.691321507096291 -0.585567146539688 +-1.38042062520981 -0.968053206801414 +-0.39667909219861 -0.353070788085461 +0.398405600339174 -1.20897991210222 +-0.287635784596205 -1.0955511033535 +-1.69928913936019 -0.456927265971899 +1.33867180347443 -1.13685166463256 +1.34834719076753 2.50902118906379 +1.98050229251385 1.51944163814187 +-1.05854647606611 
-1.51804677769542 +1.14377752691507 1.84234618023038 +1.53079906478524 2.83065773919225 +2.19661881402135 2.85262946784496 +0.287868835031986 -0.510773781687021 +-2.00835399702191 1.93095866218209 +0.310622058808804 0.75826008617878 +-0.190030105412006 -0.761226858943701 +-0.255338937044144 -0.903009202331305 +2.78517828881741 2.28934032469988 +-1.80742545425892 -0.368785314261913 +1.30166174098849 0.647583734244108 +-0.26687516272068 1.92797658592463 +-2.62649197503924 1.87322207912803 +-1.34476486966014 2.53336980938911 +-0.692341540008783 -1.52812625467777 +-0.0950051583349705 -1.15091420337558 +1.06073168292642 0.776285648345947 +1.35498594120145 0.79409758374095 +-1.38910534605384 -0.595482427626848 +-0.224023744463921 -0.0360421650111675 +-1.16178663447499 2.24546315893531 +0.0655077956616879 -0.416848361492157 +-1.35964253544807 1.09076727181673 +2.44515424221754 1.13928099349141 +0.847987443208694 -1.13649037107825 +1.65683241561055 2.07730574160814 +-2.95222776010633 1.80774870142341 +0.399796206504107 -0.895964369177818 +-2.07583208009601 1.20335067808628 +0.660529419779778 -0.394620813429356 +-0.411162719130516 0.473491478711367 +-0.969328831881285 -0.446278657764196 +0.0237792022526264 -1.55781099200249 +-0.995430421084166 -1.24268966913223 +0.2467185780406 -0.691286470741034 +1.18473277613521 2.42099818587303 +-1.00726762786508 -1.10358987003565 +0.940124910324812 -0.518878433853388 +-0.948247779160738 0.3397979401052 +-0.789551608264446 1.23151097074151 +2.48898810520768 2.59765702113509 +-0.893373291939497 0.431356657296419 +-0.0114492811262608 1.6296511888504 +1.83897611498833 1.37603674083948 +-1.77145553007722 -0.202895328402519 +0.25846840813756 -1.25948973372579 +-2.66517697647214 2.31608524173498 +1.69083123654127 -0.215125788003206 +-0.39653080701828 0.182193029671907 +-1.46714804321527 1.37769846618176 +0.793398272246122 -0.589494001120329 +-2.44178625196218 2.57558770850301 +-1.69578438997269 1.32638847455382 +-1.73130038380623 -0.697628919035196 +-1.45899599045515 2.31066827476025 +0.728508248925209 0.53390396386385 +-2.048349943012 1.76371739432216 +1.0664843916893 -1.47686173021793 +-0.399995774030685 0.341513816267252 +-2.72579349949956 1.91473225876689 +1.6675011664629 0.553028177469969 +1.66565986722708 2.79927109926939 +-0.866074729710817 -0.913385849446058 +1.52837346494198 2.00925659760833 +-1.37585379183292 -0.129483666270971 +0.49079716950655 -1.88735348731279 +0.994381971657276 -0.313912995159626 +2.09871418401599 1.93775032833219 +-0.224189344793558 1.85717357695103 +-1.05355723947287 1.5383498519659 +2.50795531272888 1.6599242836237 +-0.209888704121113 -1.73083022981882 +-0.405090358108282 -0.565305110067129 +-0.930105268955231 -1.21840662509203 +-2.86735175549984 2.40231940150261 +-0.719609744846821 0.374521631747484 +-1.05226128920913 0.505287166684866 +-2.55614624544978 2.06819202750921 +1.6060251891613 1.13624165207148 +-1.67735923081636 2.55246483907104 +-0.446074556559324 -0.679231196641922 +0.916682865470648 -0.0678660497069359 +1.22196538746357 1.88870638608932 +0.597849823534489 0.674730945378542 +-0.704685419797897 -0.258318159729242 +-2.0543662942946 2.02656149864197 +0.891719341278076 0.816141601651907 +-0.118545908480883 -1.25776182115078 +1.2002090215683 1.19628472998738 +-1.05952686443925 -0.962503124028444 +-1.69786906987429 0.951651737093925 +-0.650995850563049 0.634621616452932 +-1.71686647087336 2.03743363916874 +-1.99763114005327 1.72805035486817 +-0.205468993633986 1.85056952387094 +1.02870529517531 0.064549345523119 
+-0.146322827786207 -1.89333761483431 +-0.489826455712318 0.535165868699551 +-0.072777833789587 1.65101490542293 +-0.497606657445431 1.06842465698719 +-2.20949180051684 2.34132381156087 +0.848336264491081 -1.17849794775248 +1.57550603896379 2.4611015804112 +0.335405975580215 -1.66653947159648 +1.14221269264817 0.0195551477372646 +-2.31000315397978 2.55993187054992 +-1.72967474535108 0.524641089141369 +-1.36199149489403 -0.719061348587275 +0.118909396231174 0.594151802361012 +-0.481104087084532 0.483846262097359 +1.32291376963258 2.18453773856163 +1.50200623646379 1.61092294380069 +0.645074732601643 1.82741669565439 +1.19223527982831 0.463022213429213 +0.295538507401943 -1.79846029356122 +0.998433597385883 -1.39476082473993 +0.842774197459221 1.49966719001532 +-0.817861273884773 1.49999779462814 +-1.87652073055506 1.19928789138794 +0.749096009880304 1.18500901013613 +1.42933953180909 1.7409652993083 +0.190450336784124 -0.641610156744719 +0.0293982215225697 1.49306548759341 +1.91142379492521 2.87014353647828 +0.465804819017649 1.67435042187572 +1.36204594001174 0.968509033322334 +1.96967791765928 2.60114816576242 +-1.90417007729411 -0.504173867404461 +0.534611448645592 1.45037935301661 +-0.411468416452408 1.12366824224591 +-0.212304782122374 0.0326254516839981 +-2.94109025597572 1.7348535656929 +1.7620582357049 2.95560726895928 +2.36963600665331 2.13563994318247 +0.796562135219574 0.867073815315962 +-1.0738144852221 0.769067261368036 +-0.894149377942085 1.09511521458626 +1.367077216506 -0.578886575996876 +-1.66159275919199 -0.063445907086134 +1.57394280284643 2.70498381182551 +-0.342795513570309 1.46581531316042 +1.76578427478671 0.931401409208775 +1.2406296171248 -0.392780505120754 +-0.470866855233908 -1.25322892144322 +0.85710883885622 1.35114281252027 +1.60290158912539 0.399022199213505 +-1.36654223129153 1.87416058033705 +1.19785878807306 1.79522192850709 +-1.76364934444427 0.674925483763218 +1.8253058232367 1.46455241367221 +1.46532701328397 -0.441478464752436 +-1.3570507094264 1.03697694465518 +1.83215926587582 2.60779842361808 +1.68517207354307 2.01072565093637 +-1.69096244871616 2.16264498978853 +0.284031357616186 -0.572413012385368 +0.186469081789255 1.54226495698094 +1.74049527570605 0.670950286090374 +-2.02896302193403 2.41771272197366 +-0.992539379745722 0.485733367502689 +0.689439572393894 1.64766439050436 +1.13693347945809 -1.06992120295763 +-1.50385212153196 0.705824203789234 +1.31875424832106 1.68195251375437 +-1.43622453138232 1.60683204978704 +1.00236030295491 1.62821625173092 +-0.178943950682878 1.84273086115718 +0.822258949279785 1.5556476265192 +-1.84905136376619 0.692009579390287 +-1.14245877414942 -0.504500448703766 +-1.16809325292706 1.63011059910059 +-0.510792605578899 0.568752486258745 +0.0902158506214619 -0.156237129122019 +1.65943833440542 1.95333684235811 +1.6490728110075 -0.830575123429298 +0.842296838760376 1.80991211906075 +-0.976201076060534 1.61962333321571 +-2.03071438893676 1.97334095463157 +2.51917824521661 1.81989790871739 +1.45615021139383 2.3375513330102 +1.66225219145417 1.48205820098519 +-2.69205501675606 2.13127242401242 +-0.28161371126771 0.360685806721449 +-1.06875893846154 2.08794489875436 +-1.30800876393914 2.23485289886594 +-2.56489435583353 2.62227062135935 +0.826839290559292 -1.24861378967762 +0.54273496940732 0.41540090739727 +-0.967941235750914 1.52576611936092 +-1.14812186360359 -0.0538490042090416 +1.83834745362401 2.29895519465208 +1.51831766963005 -0.31090060994029 +-1.26423227414489 -0.692978259176016 +0.272558357566595 
1.03568541258574 +-1.97787779197097 1.48064244538546 +1.52979004383087 0.813436396420002 +-1.53281265124679 2.59598413109779 +-1.04855858534575 -1.64809196069837 +0.696533568203449 -1.3451510630548 +0.372460730373859 1.33964887261391 +0.72839741781354 -0.147969048470259 +1.80492720380425 0.00995176658034325 +1.1327454559505 -0.371099308133125 +-1.11069730296731 2.03052124381065 +-0.806988656520844 1.6305853202939 +0.339484926313162 -1.95975989848375 +-0.42725782841444 1.37876681983471 +-1.03579676523805 -0.822318635880947 +-0.835364256054163 0.118381723761559 +-1.69017662480474 1.56672967597842 +0.352252677083015 -1.29679801687598 +-0.407680682837963 0.732872687280178 +1.51553176343441 1.48318805545568 +-0.110122289508581 1.30076130107045 +0.874081172049046 -0.11766704171896 +-0.118740659207106 1.01879732683301 +-0.214172098785639 -0.491531331092119 +2.24064241349697 2.87983257323503 +-0.415952708572149 0.371467906981707 +-0.203842297196388 0.960098944604397 +1.2062905356288 -0.699965976178646 +2.74278998374939 1.86408155411482 +0.320603862404823 -0.159329816699028 +-0.0153518579900265 1.08581601455808 +2.19744473695755 1.01873058453202 +0.768358811736107 1.52204347029328 +-1.28730553016067 -1.31758530437946 +2.73946379125118 2.01137364655733 +-1.45535847917199 0.324210744351149 +-0.56381393969059 0.775717336684465 +1.23908796533942 0.78735040128231 +2.14329965040088 1.59293360263109 +1.90373154357076 2.53903841599822 +2.5945466645062 1.44555818662047 +0.20230470225215 -1.37407327070832 +-0.143388524651527 1.58482145145535 +0.478755515068769 1.40711262449622 +1.79617174342275 1.37725790590048 +2.66346601769328 2.44661457836628 +-1.38583933189511 -1.09997988119721 +0.834720861166716 -0.0740629248321056 +-1.31419216096401 1.52753995358944 +-1.852643661201 0.139946516603231 +-1.00235583260655 -0.30242482945323 +-1.99337348341942 2.84662327170372 +1.14156935364008 -0.324354536831379 +1.84718488529325 0.0352894403040409 +-1.96368502080441 2.57954894006252 +-0.119997628033161 0.123996611684561 +-1.30024753510952 -0.101663548499346 +-0.168753363192081 1.18241372704506 +2.93763614445925 1.95625750347972 +-1.41224979609251 -1.24899871274829 +-0.0054188072681427 1.74759991466999 +-1.03154782950878 0.435133077204227 +0.839729141443968 -1.45748181268573 +-0.517120651900768 -1.72570560872555 +1.26823446527123 0.853403646498919 +-0.436354514211416 0.444917909801006 +-0.29560436680913 -1.28405167534947 +1.38472352549434 -1.38490027934313 +1.22257509082556 -0.216967049986124 +0.988179478794336 1.65502151846886 +-2.54481798782945 2.64023477211595 +-1.69630657508969 -0.620360881090164 +0.975335337221622 -0.261935099959373 +-1.27413498610258 1.15585869923234 +0.206136699765921 1.72873165458441 +1.20638193935156 -1.38504011556506 +1.71863244473934 0.75810419395566 +1.59893619269133 1.30716761946678 +-0.0872636735439301 1.75481424853206 +-0.396535135805607 -0.98787384852767 +0.942408345639706 -0.857676532119513 +-1.60925663262606 -0.800327889621258 +0.766681645065546 1.38158478587866 +-2.88374525308609 1.80483250692487 +-1.60104274377227 -0.656006369739771 +0.63156796246767 0.275336101651192 +0.436904773116112 -0.424379881471395 +-1.12482491508126 0.109235595911741 +1.1510897949338 2.26205512136221 +-0.919150307774544 -0.338728349655867 +-1.32145115733147 2.33375080674887 +1.55333189293742 2.20352626591921 +2.00664648413658 1.59387443214655 +1.38568602502346 1.16200558468699 +1.51604575663805 -0.264383304864168 +1.41702525690198 1.18318856880069 +-1.05505332350731 2.15301766991615 +-0.358058448880911 
-1.4770181439817 +1.5484751239419 0.169570002704859 +-1.90984745696187 1.11906500160694 +1.06544123962522 0.451184146106243 +0.698303736746311 0.779252018779516 +-1.89674927294254 2.85876985266805 +0.663066368550062 1.66516718268394 +-0.187380358576775 1.42472422868013 +0.61217688024044 0.0304476171731949 +-1.00905916467309 -1.15516988188028 +0.260148886591196 -1.2026321105659 +-2.29356816038489 1.65352784097195 +1.41498181223869 2.55915705114603 +0.546138528734446 -0.960829388350248 +1.08066956326365 -1.61701476201415 +0.986718844622374 -0.00140663981437683 +0.383860133588314 -0.538665536791086 +-0.900959778577089 -0.344505857676268 +0.305624064058065 1.72203767672181 +-1.34755127504468 -0.570920672267675 +-1.42425990104675 2.66224671900272 +0.489251203835011 -0.969876617193222 +2.39122373610735 2.12566673010588 +1.87761353701353 0.161246441304684 +1.21593409404159 -1.39191134274006 +-1.10211608186364 -0.405689969658852 +-0.80924753844738 0.366683252155781 +0.879422273486853 0.943750932812691 +0.583170592784882 0.918510638177395 +-0.969998840242624 0.598363827914 +1.53621461614966 -1.06716960296035 +-1.03470307216048 -0.632836308330297 +-2.80318377539515 2.4741077311337 +-0.924252562224865 -1.66891806945205 +-0.246250450611115 1.13012455776334 +1.9019998498261 -0.465613212436438 +-0.417678818106651 -1.03416936472058 +-0.968777470290661 1.2820307277143 +2.00996989384294 1.8973821029067 +-0.243026554584503 0.406995803117752 +2.20470736548305 2.5316822193563 +-1.88890985772014 2.85923311859369 +-1.00111996382475 0.605536110699177 +2.59369395673275 2.66254781186581 +-2.60133486241102 2.24423136934638 +-1.19778894633055 0.537084687501192 +2.93184942752123 1.74764718487859 +0.923353426158428 0.0389558300375938 +1.26853878051043 1.79650514572859 +-0.518099043518305 0.617211516946554 +-1.12411794438958 -0.800968367606401 +-2.14840409532189 2.00766563042998 +-0.996197186410427 -1.35240441188216 +-1.48294937238097 2.23742739111185 +0.968910474330187 -0.374919943511486 +-0.738123778253794 -0.619868505746126 +0.945061028003693 0.873564630746841 +0.171944085508585 0.470166921615601 +1.32497907429934 1.97018453478813 +-0.136439260095358 0.24253448843956 +2.28391725197434 1.03662192821503 +-1.91333835199475 -0.0480712279677391 +-1.34333511814475 2.00466987863183 +-0.166630610823631 1.89760782942176 +1.0279202349484 -1.13903166726232 +0.975406661629677 -0.124645859003067 +1.67279401421547 0.925769589841366 +-1.94899424910545 0.208681590855122 +-1.75259163230658 -0.618096582591534 +-2.3955903314054 1.95954241231084 +-0.446101505309343 1.50143673270941 +1.66096949949861 1.21293381229043 +-0.543239910155535 1.79903465509415 +-2.38803862035275 2.36852484568954 +-1.73078320920467 2.51394088938832 +-2.50258870795369 1.66467072442174 +-0.0782664828002453 0.486649684607983 +1.19178538769484 1.79113034904003 +-0.317793242633343 -0.896779026836157 +-1.95786874368787 0.0139664188027382 +1.40688188746572 0.776551216840744 +1.04709561914206 0.0813212543725967 +1.64535471796989 1.6262321062386 +1.18573338910937 0.536393497139215 +0.701537173241377 -0.638793349266052 +-2.25342507660389 2.04687113314867 +1.42687803134322 -0.366667557507753 +-1.22290619835258 -0.614000741392374 +1.71993091702461 -0.700180523097515 +1.90710308030248 -0.0727968811988831 +0.796060774475336 -0.900468502193689 +-0.568262230604887 -1.25835485383868 +-1.93757561221719 2.63717301934958 +0.292537242174149 -0.864445213228464 +2.30495481193066 1.42506176605821 +-2.1223965100944 2.75947442278266 +0.295025359839201 0.0513396263122559 
+-1.81718950346112 2.52630697563291 +1.37428697198629 0.997273214161396 +1.6294732503593 0.941270407289267 +-0.435313690453768 1.19484461471438 +1.88727264106274 1.37548463791609 +-1.71844590455294 2.46764164790511 +2.52898050099611 1.71197479590774 +-2.67951805517077 2.05714770033956 +1.15363360196352 0.510882560163736 +1.63217231631279 1.44532474130392 +2.89793423935771 1.93908118084073 +-0.874428316950798 1.17062466219068 +-0.775780286639929 -1.7684477083385 +0.254469126462936 -0.000789850950241089 +1.47978096455336 0.476538967341185 +-0.243857368826866 1.73760278895497 +0.591321937739849 0.991762358695269 +1.50001657009125 -0.117703784257174 +0.61852628365159 0.693893823772669 +-1.81195368990302 2.21055035293102 +-1.85605587437749 1.82814200595021 +-2.48879143223166 1.31928436085582 +-1.37721335887909 1.69532751664519 +-0.813182186335325 -0.389913730323315 +2.39468919113278 1.15957869589329 +0.534184690564871 0.289443306624889 +-2.36266994848847 1.2572060264647 +1.3303911164403 0.30174845457077 +-1.38196036592126 0.409206200391054 +2.35846554860473 2.24114617332816 +-0.914730504155159 1.28888985514641 +0.183831825852394 -0.146039597690105 +1.26977514848113 1.77882845327258 +-2.44739438220859 2.33298860862851 +-1.74329531565309 1.35688468441367 +-0.929636225104332 0.920690514147282 +-0.494702156633139 -0.813763901591301 +-0.0661061853170395 1.89849789813161 +1.47438013553619 0.193619310855865 +1.58672252297401 0.733369510620832 +-0.351892650127411 0.284267034381628 +1.75767408311367 1.57967934384942 +-0.0640014037489891 -1.34495693445206 +-0.862876426428556 -0.317366231232882 +2.25794350728393 2.42369047552347 +1.7709633409977 -0.555331382900476 +1.6873746663332 0.740245223045349 +-0.17850324138999 -0.340132746845484 +-0.860036619007587 0.968185175210238 +-0.514636486768723 -0.682563856244087 +0.505094725638628 1.5740845836699 +1.55542559549212 -0.706669516861439 +1.77699332684278 0.396493799984455 +1.28156410157681 -0.298988975584507 +0.395728535950184 -0.234021820127964 +-1.20975178480148 2.48341897130013 +0.799201633781195 0.131150975823402 +0.476948041468859 1.32324445992708 +-0.992352154105902 0.210497617721558 +-0.342820681631565 0.314458131790161 +1.48426842316985 -0.39131597802043 +0.350065391510725 -1.54851989075541 +1.44512277841568 -1.03753179684281 +0.271916903555393 -1.02282145991921 +0.0359596163034439 -0.484077002853155 +-1.82913268730044 0.668701436370611 +1.88482846692204 2.68528117984533 +-0.858602471649647 -1.78085525333881 +0.724907800555229 1.70384765416384 +-1.41043975576758 1.87408371269703 +-0.826202359050512 0.869140725582838 +1.91067579761147 1.13892366364598 +-0.59949029609561 0.764587610960007 +-1.24684447422624 2.4466005936265 +-0.686483476310968 1.68860962986946 +0.485084772109985 1.69869395345449 +1.14999643713236 -1.10347502678633 +0.685935221612453 -0.944097194820642 +0.0572578944265842 0.657803799957037 +-0.702888116240501 1.38594684004784 +1.99081226810813 2.48890936374664 +-1.34413227066398 0.41770676523447 +-0.393703136593103 -0.915893383324146 +0.0851183570921421 -1.93488254770637 +-1.76424411684275 0.0818308033049107 +0.456221580505371 -0.0236069895327091 +-2.89251169189811 1.73848870024085 +2.27799066901207 1.97804321348667 +1.88997600227594 0.399365697056055 +-1.50941341370344 0.34359584748745 +2.11920883506536 1.7085314206779 +-0.114053167402744 -1.29844237864017 +-0.628813672810793 0.976438838988543 +-2.63483680784702 2.5202415920794 +1.85074241831899 1.78484361618757 +1.8484637401998 2.53018717467785 +0.471301104873419 1.28805090859532 
+0.847472358494997 -0.898552682250738 +0.646543309092522 -0.16012892127037 +-0.952230434864759 0.766324136406183 +0.614107564091682 -0.99101035669446 +0.345127291977406 1.07886619493365 +-0.195697739720345 -1.05000903829932 +-0.380752563476562 -1.73475751653314 +1.42265178263187 2.02415054291487 +2.74424086138606 2.10925237834454 +1.51389836892486 1.02285511791706 +-0.0243615470826626 -0.544542454183102 +-1.03418653830886 -1.3781758248806 +-0.871465746313334 -1.16859907656908 +1.34217870235443 -1.1608718521893 +1.6652176566422 1.35767546668649 +0.674173839390278 -1.12706139683723 +-1.14613376930356 -1.51314881816506 +-0.612754043191671 -0.907412678003311 +-2.46426976472139 1.84816674143076 +0.201469924300909 -1.05186136439443 +-0.422686669975519 0.790157876908779 +-1.88278864324093 0.270567934960127 +1.08508414775133 -1.11333323642612 +-0.0922389291226864 -0.533447869122028 +1.30988063290715 0.458867628127337 +0.22495498508215 -0.683922026306391 +0.7210941798985 -1.60799758508801 +-1.52406945824623 2.73538257926702 +-0.81558121368289 -1.3732994236052 +-0.425842612981796 -1.20989165455103 +1.91299761459231 -0.568206589668989 +2.01833194121718 2.39916389808059 +-2.39010912179947 1.42142435908318 +-1.74438965320587 -0.0919308848679066 +0.667577587068081 -1.6644991338253 +0.779206871986389 -1.69361850991845 +-1.72693637013435 2.60676474124193 +0.55147647485137 1.6550363227725 +1.45989297330379 -1.05599289759994 +0.0830420926213264 1.1966425254941 +-1.27489920705557 -0.112825509160757 +-0.908308487385511 0.995205584913492 +1.95256850495934 2.94590635225177 +1.43996588513255 -1.07236932963133 +1.20131640508771 -0.742516659200191 +1.71982816234231 1.33500841632485 +2.88549350574613 1.94913253933191 +0.763437524437904 1.73916415497661 +0.574139315634966 -0.420364756137133 +0.791226744651794 0.102843396365643 +2.3750647008419 1.30363932624459 +1.09366819262505 1.34090814739466 +-0.884736098349094 -1.63358797505498 +0.53565776348114 -1.89953406527638 +1.12863880395889 -0.939692486077547 +-1.53254697099328 0.530098877847195 +0.748308002948761 0.629435177892447 +0.654815282672644 -1.76358873397112 +-0.393861476331949 -0.799084816128016 +1.08036127313972 1.92055021598935 +1.29661412164569 0.152120597660542 +-1.94103948771954 1.51171483099461 +-0.128262355923653 0.21882926300168 +0.572146777063608 -0.657472055405378 +1.37985510006547 -0.839087370783091 +0.916110672056675 -0.797529678791761 +1.38602843880653 2.2193302847445 +1.25059646368027 2.10587720200419 +-1.34656838700175 0.339540153741837 +0.226668391376734 0.712495904415846 +0.270090781152248 0.00950812548398972 +-1.1902117356658 2.35396900773048 +-1.63628279417753 2.84932683035731 +-1.08416346460581 0.418444562703371 +-1.9215960688889 2.13578252494335 +0.618572555482388 1.12661247327924 +1.12226746976376 1.89789623767138 +1.54118473827839 0.77468553185463 +-1.95916248485446 2.48849951475859 +1.6124264113605 -0.422878809273243 +2.69668290391564 2.11427798122168 +1.86908978223801 -0.490568544715643 +1.6840731613338 -0.993116207420826 +1.74183012917638 1.42872476577759 +-0.411080900579691 -0.817828830331564 +2.82886799424887 2.03234412893653 +1.77454587444663 2.51223191618919 +-2.31687952205539 2.02331018820405 +-2.17064027488232 1.09915795177221 +0.523413483053446 0.473853029310703 +-0.638563267886639 1.68507878109813 +0.984816689044237 0.069355633109808 +-0.933146186172962 1.6705283112824 +0.71813366189599 0.485214486718178 +-0.843744456768036 0.0497310347855091 +2.90178822353482 2.15601041540504 +-2.10211572423577 1.12856120616198 
+-0.0507114380598068 -1.42241524532437 +-1.79691607505083 1.89023927971721 +0.959088746458292 1.05992981418967 +-1.85609507188201 1.14761648699641 +-2.26836796849966 1.54121755063534 +-1.96691389009356 -0.139477670192719 +0.0806708969175816 0.793941393494606 +-1.1332274787128 -0.334226533770561 +0.0401289910078049 -1.04977475479245 +-1.41491392254829 1.65214296802878 +1.76684099435806 0.0535396300256252 +1.48044414073229 0.301903005689383 +-0.024097204208374 1.42787960544229 +0.878896530717611 1.11838189512491 +1.40061384811997 -0.167424369603395 +-0.488874722272158 -0.697493743151426 +-0.267077516764402 1.38813158124685 +-1.25714904814959 -0.544302802532911 +-0.507697250694036 -1.05634210631251 +-1.55468520894647 0.376363899558783 +-2.16683458536863 2.25157957524061 +0.281365387141705 -0.143860466778278 +0.288190789520741 -1.92122063413262 +0.518973927944899 1.50704806298018 +-0.245032917708158 1.71089277043939 +-2.68925501033664 2.41563714295626 +1.31889071315527 2.65617146342993 +2.17284151539207 2.51147160306573 +-1.10055869817734 2.272013053298 +-1.32014496251941 -1.21443381160498 +1.06016584485769 -0.210896413773298 +-2.25808301568031 1.14688080176711 +0.975884988903999 -1.47191211208701 +-1.63214645907283 -0.215129230171442 +-2.53789008408785 2.62086869776249 +-1.70535764470696 1.13262339308858 +0.0551386214792728 -1.70519518107176 +-0.442782033234835 0.935897544026375 +1.92194256559014 0.390244331210852 +-1.1009604036808 0.386501774191856 +2.91439870372415 2.06311585754156 +0.337399885058403 -1.0453582033515 +-1.30170544609427 -1.00029923394322 +1.20396683365107 -1.3282556347549 +1.13813726603985 -1.18326728790998 +-0.689709939062595 -1.87015073001385 +1.74858369678259 1.70130651816726 +-2.59149133041501 1.29378889873624 +0.474475856870413 -1.92297760769725 +1.4278754927218 2.0499471463263 +-1.20605953037739 2.54013102501631 +0.923003181815147 0.264069519937038 +1.84471410140395 0.991089060902596 +-1.37053893506527 1.42480167001486 +-1.53398653492332 -0.694393869489431 +1.2887154892087 -0.784375056624413 +0.640285693109035 1.62643254175782 +-2.27647408470511 1.55260406062007 +-0.299826052039862 0.635927557945251 +0.881874818354845 -1.2718690559268 +1.59365097805858 0.811991211026907 +0.403117448091507 -0.502604890614748 +0.74269599840045 0.382173452526331 +-0.319134410470724 -1.82321806252003 +-1.49584221094847 2.2715064547956 +-1.00194687768817 0.409667234867811 +2.07867283374071 2.05325496941805 +0.232213128358126 1.69324882701039 +-0.049113467335701 -0.0393387340009212 +0.777842655777931 1.21763714030385 +-1.07663184776902 1.95193273946643 +2.16384015604854 1.44214285537601 +-2.25962799414992 1.48375824838877 +1.4239885546267 1.71777135506272 +-1.36490358784795 2.0131533369422 +-2.22209000214934 1.19982708245516 +0.82531526312232 -0.278329316526651 +-1.70545051619411 0.860052708536386 +-0.357414375990629 0.941951371729374 +1.10000788420439 -1.37199730426073 +-0.0570443794131279 -1.97479598969221 +-0.812472648918629 0.0831182077527046 +1.80845773592591 1.99209493771195 +1.32344111800194 -0.538439873605967 +-0.357200480997562 -1.71669362857938 +1.53186367824674 0.315534025430679 +0.82576085627079 1.73452794924378 +0.219079677015543 -0.404678825289011 +-2.33490384370089 2.33919684588909 +-1.19817236810923 1.55901867523789 +0.716684281826019 -1.24050599336624 +0.397743690758944 0.00913381949067116 +-0.835496604442596 -0.0151680894196033 +-0.0634462684392929 1.58912302553654 +0.900984782725573 0.758612770587206 +-1.85306737944484 0.319148607552052 +2.09138020128012 1.37290551140904 
+-0.661753442138433 -0.941579073667526 +-0.608980346471071 -1.50077041983604 +1.17374515160918 0.831169582903385 +1.31787206232548 2.3420607149601 +-1.07449563592672 1.96222720295191 +1.09921019524336 -0.449164088815451 +1.26344970241189 1.43923228606582 +-0.216418702155352 -1.00634315982461 +1.25332662090659 -1.31415799632668 +0.0527378357946873 0.759414691478014 +1.18124564364552 -0.0490584969520569 +1.22006125375628 0.9792794957757 +0.695145547389984 0.617221396416426 +-0.254801765084267 1.1906960234046 +-1.28299098089337 0.642135489732027 +2.45746229588985 1.70496477931738 +-1.17332484200597 -1.59307746961713 +-1.9483567327261 1.16866063699126 +1.02704747766256 -0.011035967618227 +-0.246433570981026 1.05056077241898 +0.672275342047215 0.517486501485109 +2.17723604664207 1.25270359963179 +1.08011062815785 -1.47078445926309 +1.36173336952925 -0.847719602286816 +1.30454055964947 1.68074179440737 +-1.06599236279726 1.10785324126482 +-0.838723484426737 -1.13782396912575 +-2.44895866885781 2.47229275479913 +-2.05379516631365 2.39817933738232 +-0.601157538592815 0.903928644955158 +-0.396757740527391 0.354050010442734 +1.28343425318599 0.816341821104288 +-1.74744001403451 0.783317513763905 +0.767092797905207 -0.600551724433899 +-1.38730107620358 2.3730147369206 +-0.636607553809881 0.923654016107321 +1.61563485115767 1.73009637743235 +2.11377661302686 2.3664588406682 +-0.757848646491766 -0.156943991780281 +-2.69702971354127 1.57283791154623 +-2.26730696484447 1.05249204114079 +2.19485745579004 1.754384547472 +1.15237063169479 1.72530112415552 +-1.64448521658778 1.87912366166711 +0.0320996046066284 0.480465646833181 +0.182397276163101 1.4416377954185 +1.62722728773952 1.43145428225398 +-1.48904189839959 -0.0826472714543343 +-1.08972382918 0.00926142185926437 +-2.68728789687157 1.49247267842293 +-0.757184777408838 1.53481579571962 +-0.833796966820955 0.443349592387676 +0.478429917246103 -0.800728984177113 +0.722370959818363 -0.79191180691123 +0.403286144137383 0.462460938841105 +-1.39579147845507 -0.724067036062479 +0.444058105349541 0.818816624581814 +-1.68277539685369 2.22070788219571 +0.838686894625425 0.6393784545362 +-1.91486190259457 0.0253846943378448 +0.600966982543468 -1.82436042279005 +-1.277842245996 -0.118559155613184 +1.76276523247361 1.68960708752275 +-0.99296984449029 0.424704868346453 +1.18724594637752 1.34375468268991 +2.37514391541481 2.23675296828151 +0.0116711668670177 0.188089456409216 +-1.8155354373157 -0.765440158545971 +1.69455172866583 2.68337533250451 +-1.59722140058875 2.86907280981541 +-1.32735952362418 2.59759429469705 +0.0695234052836895 1.56843637302518 +1.60075604915619 -0.21474239602685 +1.39565305039287 0.347094155848026 +0.427820183336735 -0.0469322092831135 +1.32686518877745 -0.921629123389721 +-0.121726859360933 0.32896738126874 +-1.64968452230096 -0.955181710422039 +-1.48030536994338 -1.03632619231939 +2.09005613252521 2.37288067117333 +-1.66290573403239 -0.27161306515336 +-0.574966479092836 -0.442513599991798 +-1.61015925183892 -0.237352445721626 +-0.89203791692853 -0.222680982202291 +0.401835411787033 0.238728575408459 +2.32810546457767 2.73514260724187 +2.13840274512768 2.72887314856052 +0.342562105506659 -0.669605202972889 +-1.42675496265292 1.05409230664372 +2.1860903352499 2.09139884635806 +0.177124235779047 -1.15622011944652 +0.797249980270863 1.4084002636373 +1.33378928527236 0.105814483016729 +-1.47985967993736 0.860745962709188 +1.44199556112289 2.72767503932118 +0.240204323083162 1.08703753352165 +-1.37344567477703 2.04781191051006 
+0.638807997107506 -1.77248014882207 +1.12619720026851 1.58048415929079 +-0.736825603991747 -0.0648382604122162 +0.791943285614252 1.8357020393014 +-1.93764190003276 2.1579017713666 +0.183952249586582 0.720884148031473 +-0.574784457683563 -1.76957397907972 +1.5601919144392 -0.0849664099514484 +1.51056436449289 -1.08003137633204 +0.300978627055883 0.930966433137655 +-1.27740582823753 -1.43966552242637 +-2.33089292421937 2.57113075256348 +1.36688715964556 -1.17987874895334 +0.0270423777401447 0.124489039182663 +0.47801761329174 1.86993185058236 +-0.705462254583836 1.81708582863212 +0.907675970345736 1.0867829695344 +0.958536431193352 -0.456652037799358 +-1.68811328709126 1.87701383978128 +1.65034692361951 -1.05884345248342 +2.63114537298679 1.61032423749566 +-1.77697109431028 0.176823940128088 +0.598320171236992 0.619921676814556 +1.88289631158113 2.31373123079538 +-2.4466074667871 2.34626325964928 +1.1007204093039 2.22988183051348 +-0.737929683178663 -0.263063512742519 +2.55732155591249 1.27699478715658 +0.816300697624683 -0.566114872694016 +0.181062240153551 -1.68932795152068 +-0.0829509943723679 0.236184768378735 +-1.13077882304788 1.12926836311817 +1.05313351377845 1.37948416173458 +-1.07230304554105 1.67760471254587 +0.529088359326124 -1.3889919295907 +1.12233648449183 -0.999941252171993 +2.18413662165403 2.46712590381503 +1.82675424218178 0.802689839154482 +-0.31131449714303 -1.51827676966786 +-0.808601349592209 1.54608147218823 +0.820773400366306 1.28032238781452 +-1.36505043879151 -0.121335104107857 +1.00641019642353 -1.31327236443758 +0.0895601324737072 -1.26932215318084 +0.814994908869267 0.195346876978874 +1.92312697321177 0.137690834701061 +-1.27207358926535 2.08454034104943 +-0.367964740842581 1.03783335164189 +2.32428394258022 2.28126840665936 +-1.00152835249901 0.739853352308273 +-2.67709967494011 2.02842901274562 +-0.317531622946262 -0.109149869531393 +-0.0703591071069241 1.77577510103583 +1.74775115028024 -0.400639072060585 +-2.83479559794068 2.44857370480895 +-1.14279606193304 0.562383472919464 +0.11664829403162 -1.91595252603292 +-0.870340034365654 0.443951494991779 +-0.154738746583462 1.73228726536036 +-1.50423948839307 2.01367106288671 +1.48824631795287 1.58367779105902 +0.237586673349142 1.08566350862384 +0.0403612852096558 -1.93814538046718 +-1.37780929356813 -1.41751243546605 +-1.66037364676595 1.79575973004103 +-1.1087427213788 -0.116405360400677 +1.02777711302042 0.609361436218023 +-1.78872579336166 0.584985569119453 +-1.43333660438657 2.62718802690506 +-0.689389616250992 0.691845260560513 +-0.453542578965425 -1.25610475242138 +-0.117369778454304 1.50894331559539 +-0.380393926054239 0.606017965823412 +1.03743567317724 0.282101418823004 +-1.01192846894264 0.0839029140770435 +0.705929588526487 -1.11110743507743 +1.53036653995514 -1.03417452052236 +0.232523433864117 0.776294965296984 +1.55912357196212 1.66112692281604 +0.616195481270552 -0.0380712412297726 +-0.421999789774418 0.63932940363884 +0.0344843827188015 -1.98754503205419 +1.28721609711647 2.51356943324208 +0.402174923568964 -1.45315268263221 +1.50819424167275 -1.15865696966648 +0.063001174479723 -0.630236987024546 +0.974642738699913 1.16951548308134 +-2.94578353315592 1.78955787420273 +-1.19469363614917 -1.0947123542428 +0.166334897279739 1.50383788719773 +-0.856311362236738 -0.0626157857477665 +-0.718085672706366 -1.51099406182766 +0.525471851229668 0.334403157234192 +1.0380276106298 0.342794377356768 +-0.367426916956902 -0.39081934094429 +-1.79583804309368 0.111413031816483 +-1.71747960150242 
-0.88097608089447 +1.0313155092299 0.249671775847673 +-0.857296355068684 1.14199535921216 +-0.164244167506695 -0.0294807888567448 +0.784875478595495 -0.105897724628448 +1.52135105803609 -0.301928266882896 +-1.70197738334537 0.887017354369164 +-0.29646672680974 1.78780960291624 +-2.43767387792468 2.80297839641571 +-1.07150802761316 1.32888625562191 +1.96760533004999 -0.0822743400931358 +2.54846205562353 2.38143350556493 +1.89138162881136 -0.505773980170488 +0.731997057795525 1.824781332165 +-1.23025154322386 2.37042721733451 +2.10502476617694 1.01790845766664 +-0.289257038384676 0.884468097239733 +0.523988343775272 1.71371848508716 +1.37786173447967 0.152360193431377 +1.29997001588345 -1.4713263399899 +-2.09816644713283 1.11219798028469 +1.24854521080852 1.5086470246315 +-1.54973693564534 1.12164567410946 +-2.53959316387773 2.5507928468287 +1.27415506541729 -0.254352308809757 +-1.36860021948814 -1.07531612738967 +-0.103643998503685 1.1985578648746 +1.15082139521837 -0.408619735389948 +0.665836647152901 1.80017303675413 +-1.99358182400465 2.38361864164472 +1.41355921328068 1.4140549339354 +0.832684136927128 0.27773517742753 +2.55431404709816 1.60115481540561 +0.261851582676172 -1.40409507602453 +0.332075834274292 0.989556409418583 +-1.26759611070156 0.284901026636362 +0.668400105088949 -0.881270457059145 +-2.51353630051017 2.10503265634179 +-0.856151558458805 -0.129454500973225 +-1.73780505359173 2.29603727534413 +-0.219757039099932 1.72422908619046 +1.19712603092194 -0.905392464250326 +0.302428528666496 1.5408494360745 +1.0274640545249 -1.45491936057806 +1.81379937753081 2.27189913764596 +-1.3341751024127 0.13517551869154 +1.20229933783412 -0.973355367779732 +1.84877020120621 2.662238355726 +0.419793553650379 -0.917647793889046 +-1.35152518749237 2.21055740863085 +1.52457546815276 1.54749300703406 +-0.205987624824047 0.103553749620914 +-1.86822104454041 -0.712811257690191 +1.40427971258759 -0.450319468975067 +-0.374406468123198 0.30898892134428 +-0.0756941922008991 1.65920081734657 +-1.4827020354569 -0.148320492357016 +1.75215403735638 0.382022500038147 +-1.62039963155985 -0.642071709036827 +2.08675856888294 2.91796289011836 +1.31535199657083 0.898446545004845 +1.33989696577191 2.26795882731676 +-0.407118331640959 -0.864776853471994 +-0.443020809441805 -0.573748584836721 +-1.14785221591592 0.225090265274048 +-0.633429512381554 1.76854622736573 +-1.7646608799696 -0.507092922925949 +0.0646654926240444 0.300363935530186 +1.64097373187542 0.793197266757488 +-1.98553922772408 1.42902074381709 +-1.14411998540163 -0.964608911424875 +-0.369410734623671 -0.366339981555939 +-0.999868925660849 -0.0337170921266079 +1.45940089970827 2.77967078611255 +-1.31173273175955 -0.517481461167336 +0.833832927048206 1.48648469895124 +-1.52717601507902 2.76573925465345 +0.77579864487052 0.949231553822756 +0.40292314812541 0.866590268909931 +-2.65465438365936 1.95173065364361 +2.35742055997252 1.74279253929853 +-0.355315253138542 1.180599514395 +-1.67465360090137 1.91641176491976 +-0.0689448341727257 -0.909220825880766 +-2.45754731819034 1.930813934654 +-0.831767980009317 1.52211364358664 +1.35855319350958 0.420334480702877 +-2.05117319896817 1.99851844459772 +1.51295032352209 -0.117037374526262 +-0.46902297437191 1.24475559592247 +0.859275877475739 0.793505057692528 +0.396627712994814 0.832260087132454 +1.6038192473352 2.52190383151174 +1.36388263106346 -0.809721235185862 +0.296783827245235 -1.50593790411949 +2.18300115689635 1.51716662198305 +1.50742676854134 1.05989731475711 +0.941965255886316 -0.994952667504549 
+-0.881794955581427 0.315977051854134 +0.508670881390572 0.593575734645128 +1.44651893153787 0.682130258530378 +0.907365724444389 1.62373670190573 +1.29709115996957 0.930207539349794 +-1.67135568335652 1.51227409765124 +-2.41698196530342 1.96667265146971 +1.06512136012316 0.518598642200232 +-1.78652555495501 1.06272687017918 +2.17286586016417 2.62133271619678 +-0.13370419293642 0.73027215898037 +-0.695973422378302 0.132425963878632 +1.70408631488681 1.97604199871421 +-1.34619582816958 1.08122764527798 +0.635242152959108 -0.199861787259579 +-1.10192470252514 2.05480550974607 +-0.94717800989747 -0.630246590822935 +1.70300518348813 1.55034571886063 +-2.23884864524007 1.66524270921946 +0.516748148947954 -0.628420948982239 +-0.182907346636057 -0.0645793564617634 +-1.48728507384658 1.28508923202753 +-1.33415547758341 -0.553155098110437 +1.10575541481376 0.989736843854189 +-0.67467175796628 -1.80816911533475 +1.66922806575894 2.65700840950012 +-1.18470480665565 1.74626357108355 +1.24073842167854 -0.374344494193792 +-0.451939143240452 1.71041077747941 +1.64350361004472 -0.187231339514256 +0.586312063038349 -1.28227543085814 +0.752832692116499 1.52826872095466 +1.9040132984519 -0.384332694113255 +0.547763612121344 -0.21344643458724 +-0.470666497945786 -1.21811608970165 +0.76456231251359 -0.415471844375134 +1.30486880242825 0.959521226584911 +0.425816066563129 -0.82542372494936 +-0.859390750527382 -0.779338870197535 +-1.74391374737024 1.65754588320851 +-0.119972255080938 0.741616319864988 +0.506005618721247 0.609428849071264 +-1.1955440826714 2.49096083641052 +-1.19979045912623 -0.0102192126214504 +-0.0651886761188507 -0.99269525334239 +1.27262368798256 -0.254643421620131 +-0.450716532766819 1.62174102291465 +-2.5569444745779 1.6445697247982 +-1.39219756796956 -1.29068395867944 +2.6287630200386 2.14241267368197 +1.91449949145317 2.79996054247022 +1.15883900225163 0.00188637524843216 +0.982614494860172 -0.872258208692074 +1.41929485276341 1.45608067512512 +-2.48507366701961 2.26417628675699 +-2.59149982407689 2.65095861256123 +1.05873238667846 -1.66309567540884 +-1.44110786914825 -0.89272603392601 +1.42776192724705 1.21658572182059 +1.86667015776038 0.478294964879751 +-2.29095414280891 2.70314594730735 +1.03569241240621 1.08555588126183 +2.37809075787663 2.63908607512712 +-0.636470831930637 -1.30283078178763 +0.575023286044598 -0.083458948880434 +-2.95411188527942 1.99973373115063 +1.66172904521227 0.721444431692362 +0.874346606433392 -0.320394761860371 +0.914382807910442 1.72320305556059 +-1.54675411432981 0.810082443058491 +-0.0306556299328804 0.810736399143934 +-0.778492107987404 1.00261883437634 +1.94767145067453 2.39114357531071 +0.12454404681921 1.47197991237044 +0.640425965189934 -0.915675807744265 +1.30756982415915 0.257875148206949 +0.463131565600634 -0.510185789316893 +-1.94290655106306 0.390200689435005 +0.861060719937086 1.69475064426661 +-0.596020724624395 -1.31833404675126 +-0.967709474265575 -1.41061119735241 +1.86148541420698 2.1914074011147 +1.33700554817915 -0.494080230593681 +1.9155209325254 1.17085805907845 +1.5216115154326 -0.656151257455349 +-0.988004796206951 -1.05279023945332 +-1.79974020645022 1.52944333478808 +-1.58436769247055 -0.293444685637951 +-0.379862658679485 -0.562306210398674 +1.54062598571181 -0.585036925971508 +0.54957078769803 0.0526377521455288 +-1.90664768218994 2.79625792056322 +-2.53612166643143 2.13662191852927 +1.25708016380668 -0.423956073820591 +-0.268237039446831 1.23717963322997 +1.50826417654753 -1.21894085779786 +-1.37412555888295 0.956090498715639 
+-1.70549262315035 1.26755261048675 +-0.95484346523881 -0.550038985908031 +-1.07965064421296 -0.615462113171816 +-2.45840615779161 2.29837606847286 +2.36267644166946 2.34142276272178 +-1.44061214104295 1.3544699177146 +0.991551131010056 0.192100442945957 +-0.491512577980757 0.23652096837759 +-1.38528561964631 -0.634260300546885 +1.32833851501346 -1.16645000502467 +-0.0383889861404896 -1.9851943179965 +-0.183512911200523 1.44900576025248 +-1.31814044713974 -0.668087001889944 +-2.38698591664433 2.664770077914 +-0.0702574104070663 1.05192149430513 +-1.50116031616926 2.51582760363817 +0.116251889616251 0.859274040907621 +-0.589289251714945 -1.60551861673594 +0.259310230612755 0.956516105681658 +0.408254042267799 0.605765525251627 +-0.353813420981169 0.925535842776299 +2.21094064414501 2.53124418109655 +0.00580412149429321 1.07951683923602 +-0.128202505409718 -1.35972686484456 +-0.0835149474442005 -1.40766746178269 +-0.685722127556801 0.633070666342974 +-0.228976972401142 0.716798476874828 +-0.314142514020205 0.87760741636157 +-1.63135157898068 2.59023092314601 +0.48067232593894 -1.92003295198083 +-0.839801024645567 -0.365892615169287 +0.294471867382526 0.42062134668231 +-2.51683387905359 1.88696189597249 +-0.419275209307671 0.559270903468132 +1.75273936614394 -0.429852154105902 +-0.166549928486347 -1.42955809831619 +-0.493462979793549 -1.54558394104242 +-1.70016938447952 1.3782924413681 +0.0676529332995415 -0.203526772558689 +-0.638028036803007 0.0358610637485981 +-2.8336612097919 1.52146966010332 +-0.779725268483162 0.905260171741247 +1.51560585945845 2.78766296803951 +-0.722736071795225 0.352756779640913 +-1.84800857305527 1.27182929590344 +0.982172276824713 0.501089427620173 +0.307786900550127 0.148968111723661 +1.71013582125306 -0.303893703967333 +-2.0330076366663 1.83816499635577 +0.706926513463259 1.39347774535418 +1.27343387529254 2.10188510268927 +-0.0797674246132374 0.0670081079006195 +-1.29521260783076 2.1950269639492 +0.192932900041342 0.0572373494505882 +0.231393404304981 0.269573837518692 +-0.278286520391703 -0.699632372707129 +0.958856523036957 -0.246140528470278 +0.431317817419767 1.9147194288671 +1.23021182790399 1.18564041703939 +0.451975125819445 0.0850551538169384 +1.66893910616636 2.26893594488502 +-1.73326994478703 1.62885748967528 +-1.69280558824539 1.34039432182908 +0.169915474951267 0.931300632655621 +-1.96734732761979 1.43532429635525 +-1.41516100242734 1.56895295530558 +-1.38352558389306 1.20329409092665 +1.93590002506971 -0.462315123528242 +1.62447164952755 -0.468720607459545 +1.35286011919379 -1.30981515720487 +-0.57337437197566 -1.43897854164243 +-0.752261783927679 -0.412747032940388 +0.460095845162868 1.23995196446776 +-1.2392801977694 0.51284246891737 +-0.41037392988801 0.768059007823467 +-0.783293485641479 1.80975548923016 +1.55770405009389 0.696090243756771 +-2.15515127778053 2.26856860890985 +-1.69504843652248 -0.283982966095209 +-1.57191829383373 2.63137193769217 +0.560173291712999 -1.2617424428463 +-1.6279844045639 -0.617061264812946 +-0.680581245571375 1.0983675122261 +2.03615063801408 1.00735822319984 +-1.73675161227584 1.33363227918744 +-1.86319487914443 0.156173717230558 +-1.24781316518784 0.947194576263428 +-0.391869205981493 1.37865060195327 +0.136743288487196 -1.90284360945225 +0.999140165746212 1.66502907127142 +-1.34718973189592 1.69857199862599 +0.797701835632324 -1.81586116552353 +-0.0178047902882099 1.26013637706637 +0.871120415627956 -0.644829604774714 +0.224457394331694 1.51280977204442 +0.859597332775593 0.919819686561823 +-2.82274105399847 
1.71816676482558 +-1.53467748686671 0.467698305845261 +1.55112740024924 -0.47682711482048 +-0.203193809837103 1.10159240290523 +1.76938148960471 2.16226059570909 +-2.65340813621879 1.31114920973778 +-0.756886769086123 -1.03708986192942 +0.238795887678862 0.788461770862341 +-2.00365307927132 2.77997479587793 +-1.78383065760136 0.0650591738522053 +-1.64814525097609 1.73535230383277 +0.461335901170969 -0.95270873233676 +-0.163613315671682 -0.503504827618599 +-1.04729107394814 -0.477535285055637 +-1.81674943119287 1.50311726331711 +-2.63541975989938 2.63445358350873 +-1.67418662458658 2.61837893351912 +1.36899954080582 1.87464229762554 +-0.178647540509701 -1.80277233570814 +0.85903437808156 -0.224506821483374 +-0.435678482055664 1.03995686024427 +-0.138757973909378 -1.00327798724174 +0.348497241735458 -1.73964801430702 +-0.262425389140844 -1.08806496486068 +-1.78051641210914 0.47169267386198 +0.039889931678772 1.75439198687673 +0.514491584151983 -0.682184107601643 +0.811419419944286 -1.64077165350318 +-1.63327368348837 1.09899342805147 +0.0793292708694935 -1.99415926262736 +0.695150371640921 -1.60071619600058 +0.215909935534 1.71501702442765 +-1.62622308731079 2.52008878812194 +-1.74339772388339 0.199343893676996 +0.221649516373873 0.829091165214777 +0.113631065934896 -1.19130916520953 +1.46434355527163 -0.552903942763805 +0.939274299889803 0.933544926345348 +-2.26094883680344 1.27482405304909 +2.4843735396862 2.12675891816616 +2.54108319431543 2.640057310462 +-2.59099861979485 2.66347276046872 +1.87720248475671 0.614995770156384 +0.979909002780914 0.800120614469051 +1.2316859215498 -0.957638815045357 +1.25174754485488 2.22501616179943 +0.827509246766567 0.923771508038044 +-0.457216303795576 -1.36238705739379 +1.92662385478616 0.158460460603237 +0.370570465922356 1.59593136236072 +-1.3883591145277 -1.29616727679968 +0.485752776265144 -0.274427317082882 +-0.198016218841076 -1.36465971544385 +-0.647671967744827 -1.31787549704313 +1.55457253754139 -1.07191491127014 +0.576873153448105 0.523816529661417 +-0.290570918470621 0.00647160038352013 +0.288516711443663 -1.94957103952765 +-0.163418773561716 0.0187129341065884 +-2.21634349599481 1.93698082864285 +1.83702322840691 0.696440611034632 +1.47630554437637 0.346505317837 +-0.151858769357204 -0.101670775562525 +-0.61121342331171 -0.0884107388556004 +-1.56626315042377 1.30609018728137 +0.993360459804535 1.04189617931843 +-0.55113472789526 1.01046715304255 +1.93856098130345 1.03804461658001 +0.576242677867413 -1.89826102554798 +-0.153248734772205 1.15408905223012 +-1.38945835828781 0.839608117938042 +2.80443396046758 1.7546565271914 +0.400887615978718 1.3712489195168 +1.88074469193816 1.25738567858934 +-1.17537642270327 1.13607070967555 +-1.44263596832752 1.65536530315876 +-1.32891277223825 -0.0720656178891659 +-1.13314041122794 1.2361895032227 +-1.18639327213168 0.619502812623978 +-2.35144691541791 2.88997457921505 +-1.26051409170032 -0.68239164724946 +1.90215446427464 0.508575521409512 +2.16122414916754 2.65289481729269 +-2.59469514712691 1.48937852680683 +0.321152102202177 0.543348021805286 +1.50541293248534 -0.106069039553404 +0.758748143911362 0.826878488063812 +1.12901883199811 0.853483460843563 +0.798352342098951 -1.24532177299261 +-1.43217619135976 -0.836231026798487 +0.452941067516804 0.230222824960947 +-1.24927673488855 -1.2335900105536 +-0.0330780185759068 1.10919252410531 +-0.123534847050905 -1.05524901673198 +-0.993950948119164 1.33412408828735 +1.38233162090182 2.54086866602302 +-1.63140538334846 2.82513345405459 +-1.92349470034242 
0.141484968364239 +1.30741706863046 0.266764253377914 +2.93539988622069 1.76241579651833 +-1.29918939992785 -1.20068853721023 +-0.580944359302521 -0.760914769023657 +-0.145981598645449 -0.341704532504082 +0.823990140110254 0.244071137160063 +1.37918548285961 -0.153212416917086 +-1.19075786694884 -0.0831953920423985 +0.220914229750633 -0.0651553757488728 +-0.552572384476662 0.543335869908333 +1.57127697393298 0.449144970625639 +0.0944934375584126 -1.60683101788163 +-1.38165763393044 1.61453429237008 +0.0320120491087437 -1.21238803863525 +-2.61676991730928 1.6692368760705 +0.568882815539837 -0.750609058886766 +2.7475122846663 2.31340386345983 +-0.631636992096901 -0.869823697954416 +-1.02035494893789 1.23426667973399 +-2.30327251553535 1.07606317847967 +1.2536935955286 -1.02421547472477 +-0.0205694772303104 0.201753720641136 +1.98304214328527 2.87250499799848 +1.72979063540697 1.22986351326108 +-1.5397043004632 2.73696993291378 +0.0236088894307613 -0.444173123687506 +-1.87184008955956 2.14831002801657 +1.48865288868546 2.61171882227063 +-0.00229827687144279 -0.0606071501970291 +0.251513376832008 -0.213689349591732 +0.625349216163158 -0.363451439887285 +0.703389417380095 -0.961676374077797 +-1.08963143825531 2.39075188711286 +1.65403419360518 1.81590060889721 +0.0498038753867149 0.609751727432013 +-0.911196287721395 0.450370956212282 +-1.27375311776996 -0.626686055213213 +0.170301612466574 -0.0526948645710945 +-0.473431710153818 -1.73027384281158 +2.67757051065564 1.46168432384729 +0.918323513120413 -0.682236436754465 +1.01495398581028 -0.343121137470007 +2.65361326560378 1.51973400264978 +0.577519971877337 -0.250002410262823 +-2.5977134257555 2.3665869012475 +-1.50275629013777 1.82382469251752 +-1.27371744066477 1.90497035160661 +1.15311259776354 -0.835548166185617 +0.91357284411788 -0.424510281533003 +0.248583793640137 -0.479910355061293 +1.04244128242135 0.964618571102619 +0.32025945559144 -1.9448094367981 +-1.35571758821607 1.81048074737191 +-1.13960805535316 -1.37360360473394 +0.83403517305851 1.4062983840704 +-1.78757210075855 2.91149929538369 +-1.43691016733646 0.197713892906904 +-0.691463831812143 -1.74857584387064 +-1.45266249775887 -0.515165153890848 +0.884010519832373 -0.371119938790798 +-2.58516504988074 1.61748562753201 +-0.784489590674639 0.345853377133608 +-1.33161356672645 -0.903216425329447 +2.32880504056811 2.29091412946582 +1.80405613034964 0.389507248997688 +-1.46485745161772 -0.751125585287809 +-2.74127555638552 1.59760411083698 +-2.08028296753764 1.62259260565042 +1.56542291119695 -1.15426879003644 +0.492393050342798 0.999535616487265 +1.00796457380056 0.457884706556797 +2.28164030611515 1.57319641858339 +-1.31747916713357 0.0246546342968941 +1.59498060122132 2.73983108624816 +2.09613207355142 2.91043724492192 +-0.0928696244955063 0.590001679956913 +0.602534610778093 0.686082504689693 +-1.26303784176707 1.27988543733954 +-1.38072663545609 1.92611268535256 +0.757522255182266 1.67052263021469 +0.413585178554058 0.980075154453516 +-0.144942931830883 1.9943518564105 +-1.14773716777563 1.37818126380444 +1.72325848788023 2.52823302894831 +-2.34290808439255 2.73114396631718 +-0.179001700133085 -0.0550750643014908 +-0.41447664052248 -0.554839044809341 +-0.0129327736794949 -1.59723911061883 +1.39581148326397 0.574153542518616 +-0.935020532459021 -1.30064840242267 +1.76168300956488 2.23809352517128 +-0.795723930001259 -0.957099143415689 +-0.99784841388464 0.837770022451878 +-0.209455534815788 -1.06291905790567 +-0.225433152168989 -0.259807176887989 +-0.301143910735846 
0.0607243403792381 +-1.27852840349078 2.62999890372157 +-1.70671638846397 0.301549430936575 +-1.61815506592393 -0.235432423651218 +1.50110031664371 1.54744931682944 +-0.594201270490885 -0.864518139511347 +0.972399964928627 1.13791256025434 +-0.748130522668362 -1.0156760700047 +-1.41649935394526 1.54398607462645 +0.114728171378374 0.319723188877106 +-0.853777911514044 0.755842469632626 +-2.08339603617787 1.36205420270562 +1.55280862003565 2.27700191363692 +0.200363930314779 -1.44019917398691 +1.24844966828823 1.28416742011905 +-2.75884722918272 1.40484087914228 +-0.333295788615942 -1.35368778929114 +-1.22975806519389 1.97530345246196 +-1.58023802191019 -0.413470722734928 +0.0334147177636623 -0.513916682451963 +0.723606489598751 1.02234188094735 +-2.71192400157452 2.60093681141734 +1.98991824313998 2.24736044555902 +0.202114675194025 -1.30011815205216 +0.574047591537237 -1.50872865691781 +0.155146811157465 -0.387892227619886 +1.15880945324898 -1.34263291954994 +-2.33879107609391 2.50527653843164 +1.56092922016978 0.167431049048901 +0.522793278098106 1.45531935244799 +-1.54275423288345 1.15091450512409 +-0.206551656126976 1.17583490908146 +-1.64678621292114 0.81617621704936 +1.39004661142826 -1.36211525648832 +-2.31573989987373 2.75242104381323 +-0.878732800483704 0.0429337695240974 +1.24058966711164 -0.147294286638498 +-1.23168898373842 -0.342192586511374 +0.997788283973932 -0.652579020708799 +0.884458281099796 -1.02352900803089 +1.45759299397469 0.601071204990149 +-0.496292110532522 1.05267734453082 +0.90608487278223 -1.27165342494845 +1.5320561863482 2.79862427338958 +1.0955261066556 0.816902466118336 +0.775460373610258 1.63069909438491 +-0.382957138121128 0.93674449250102 +-1.69352323561907 0.294119261205196 +0.0259130746126175 -0.343311481177807 +0.0930146016180515 0.185167338699102 +1.32440802082419 -0.415522642433643 +0.812604624778032 -0.312057659029961 +-0.0775798782706261 1.50756289064884 +-1.00173954293132 -0.215883601456881 +2.67004242166877 1.4218055345118 +1.30557139217854 0.40382607281208 +-1.09812697768211 0.232875410467386 +-1.2340138964355 -0.684770572930574 +0.198747079819441 0.870595429092646 +-0.0235326960682869 -1.08960454910994 +-1.67062331736088 2.7199916318059 +2.41180004179478 1.96345344558358 +-1.88445619493723 0.261486139148474 +-0.221267484128475 0.652395274490118 +-1.30120273306966 0.328293528407812 +1.78156499192119 1.34660445526242 +-2.32475391030312 1.95731552690268 +-0.684544961899519 -0.527581188827753 +-1.25115188583732 2.44621851295233 +-1.99148962274194 1.38984725996852 +-0.485062200576067 0.906633965671062 +-0.40848821029067 1.16564637422562 +-1.22740808129311 1.37795377522707 +-1.96416359767318 2.90018537640572 +2.09066881239414 1.17756043002009 +-2.37176163494587 2.83598211407661 +-1.32914339005947 1.654003161937 +1.57798295840621 0.151651278138161 +-1.02515124902129 1.63954532891512 +-0.998364966362715 0.264427300542593 +0.128578003495932 0.178472761064768 +-2.47440527006984 2.01879696547985 +-0.412627834826708 1.78635150194168 +-1.55071283131838 0.0105222910642624 +1.80423687398434 1.65973404794931 +-1.77880709990859 -0.528857339173555 +-0.325239479541779 1.27368592843413 +-0.356961462646723 1.657122656703 +0.596778780221939 -0.239285033196211 +0.994327709078789 -0.490970946848392 +2.11159466207027 2.66314993798733 +-2.0514238961041 1.25417925789952 +0.321315355598927 -0.822392407804728 +-1.26050077751279 1.34788377210498 +2.83669743686914 1.59180461242795 +1.16895213350654 0.14076566323638 +-2.31611763313413 2.21438378095627 +-1.18570892885327 
1.07820708677173 +-2.81989406421781 1.97559417784214 +-2.35266311094165 1.80736849829555 +-0.0144418105483055 -1.66389140486717 +1.77999041974545 -0.800824642181396 +2.19892741367221 2.63303076848388 +2.54977072775364 2.61417213827372 +-0.399920649826527 -1.80029607564211 +1.23175796493888 -0.498138599097729 +-2.49276819452643 2.33056356012821 +-2.75430783629417 1.75094221532345 +-0.993552930653095 -0.716093100607395 +1.11204611137509 1.92595108225942 +1.9606952778995 1.21276931092143 +2.50044172257185 1.69504882767797 +-1.48710029944777 1.59553980082273 +-1.87748043984175 -0.36412887275219 +-1.96315219625831 2.17867825552821 +0.0192176178097725 -1.25796961784363 +0.316905941814184 -1.09806570038199 +-1.64277904480696 0.194241311401129 +0.0660528838634491 0.244520992040634 +-0.16504018381238 -0.40176809951663 +-0.775603737682104 0.0182349272072315 +1.18007648363709 1.14607951045036 +-1.49661236256361 0.644469138234854 +-0.166560478508472 -1.38236621394753 +-0.492110334336758 -1.09931644052267 +-0.659330703318119 -0.0277940034866333 +1.17636237666011 -0.855551443994045 +0.861180480569601 0.24104718118906 +2.52849067002535 1.61936584115028 +1.07524510100484 0.08399398624897 +1.37647414579988 1.43195811286569 +-1.04186408221722 -1.40898194164038 +0.792264949530363 1.78557724505663 +-1.41825994104147 1.80304077267647 +0.348236609250307 1.24237727373838 +-2.25615164637566 2.18553548306227 +1.7466052621603 2.21175952628255 +-2.14185043051839 2.59910954907537 diff --git a/inst/dev/datasets/cec/mouse_1_classic/iteration.txt b/inst/dev/datasets/cec/mouse_1_classic/iteration.txt new file mode 100644 index 00000000..8580e7b6 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/iteration.txt @@ -0,0 +1 @@ +30 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_classic/type.txt b/inst/dev/datasets/cec/mouse_1_classic/type.txt new file mode 100644 index 00000000..b7066b99 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_classic/type.txt @@ -0,0 +1 @@ +fixed_covariance \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_spherical/aproximation.txt b/inst/dev/datasets/cec/mouse_1_spherical/aproximation.txt new file mode 100644 index 00000000..7813681f --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/aproximation.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_spherical/cluster.txt b/inst/dev/datasets/cec/mouse_1_spherical/cluster.txt new file mode 100644 index 00000000..9ba7de4c --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/cluster.txt @@ -0,0 +1,3000 @@ +2 +2 +1 +2 +2 +3 +2 +2 +2 +1 +3 +3 +2 +1 +2 +2 +1 +2 +3 +1 +2 +2 +3 +2 +2 +3 +2 +1 +2 +2 +2 +3 +1 +3 +1 +2 +2 +2 +2 +2 +2 +3 +3 +2 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +3 +2 +1 +2 +2 +2 +2 +2 +1 +3 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +2 +1 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +1 +1 +3 +1 +3 +2 +2 +2 +2 +2 +3 +2 +1 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +3 +2 +2 +1 +3 +3 +2 +3 +1 +2 +3 +3 +3 +2 +1 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +3 +2 +1 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +1 +2 +3 +2 +2 +1 +1 +2 +1 +2 +3 +1 +1 +2 +2 +2 +1 +2 +2 +1 +1 +2 +3 +1 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +1 +1 +2 +2 +2 +2 +3 +2 +1 +3 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +3 +2 +2 
+2 +2 +3 +1 +2 +2 +2 +2 +3 +2 +3 +3 +2 +2 +2 +2 +2 +1 +1 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +1 +2 +3 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +3 +2 +2 +1 +1 +1 +2 +2 +2 +1 +2 +2 +2 +2 +1 +1 +3 +2 +1 +3 +2 +3 +2 +1 +3 +1 +2 +2 +3 +2 +2 +2 +2 +2 +1 +3 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +1 +2 +3 +2 +2 +2 +3 +1 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +1 +2 +1 +1 +2 +3 +2 +2 +2 +2 +1 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +1 +2 +2 +1 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +1 +2 +2 +1 +2 +2 +3 +2 +2 +3 +2 +3 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +3 +3 +1 +2 +1 +1 +2 +3 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +3 +2 +3 +2 +2 +2 +1 +3 +1 +3 +2 +2 +2 +3 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +3 +1 +2 +3 +2 +2 +2 +2 +3 +2 +2 +3 +3 +2 +3 +1 +3 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +3 +2 +2 +1 +1 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +3 +2 +3 +1 +1 +2 +2 +1 +1 +1 +2 +3 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +3 +1 +2 +3 +1 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +1 +1 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +3 +1 +1 +3 +3 +1 +2 +1 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +3 +1 +3 +1 +2 +1 +2 +2 +1 +2 +2 +1 +1 +1 +2 +2 +2 +2 +3 +2 +3 +2 +2 +1 +2 +2 +3 +3 +2 +2 +2 +3 +1 +1 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +3 +1 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +3 +2 +2 +2 +1 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +3 +2 +2 +2 +3 +3 +2 +1 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +3 +3 +2 +2 +1 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +3 +3 +1 +3 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +3 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +3 +1 +2 +2 +1 +2 +2 +3 +1 +2 +2 +3 +3 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +3 +3 +3 +1 +2 +2 +2 +2 +1 +1 +1 +1 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +3 +1 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +1 +3 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +2 +3 +1 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +3 +1 +2 +1 +2 +2 +2 +3 +3 +2 +2 +1 +2 +2 +2 +2 +1 +2 +1 +3 +2 +2 +2 +2 +2 +1 +2 +2 +3 +1 +3 +2 +1 +2 +2 +3 +2 +1 +1 +3 +2 +3 +1 +2 +2 +3 +2 +2 +1 +1 +2 +1 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +1 +2 +3 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +1 +2 +2 +2 +3 +2 +2 +2 +3 +1 +1 +3 +2 +2 +1 +1 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +3 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +3 +2 +1 +2 +2 +3 +3 +2 +1 +1 +2 +2 +1 +1 +3 +2 +3 +1 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +1 +1 +2 +2 +2 +2 +3 +2 +3 +1 +2 +3 +2 +3 +1 +2 +1 +3 +2 +2 +3 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +3 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +1 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +1 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +3 +3 +3 +3 +1 +3 +3 +2 +2 +2 +2 +2 +2 +2 +1 
+1 +2 +1 +1 +1 +2 +3 +2 +2 +2 +1 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +1 +3 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +3 +2 +2 +3 +2 +3 +3 +2 +3 +2 +3 +2 +2 +3 +2 +1 +2 +1 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +3 +2 +2 +3 +1 +3 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +3 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +3 +1 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +1 +2 +1 +2 +2 +1 +1 +3 +2 +2 +2 +3 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +3 +1 +1 +1 +3 +2 +3 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +1 +2 +2 +1 +2 +2 +2 +1 +1 +1 +2 +2 +2 +1 +1 +2 +2 +3 +2 +2 +3 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +2 +3 +1 +1 +2 +2 +2 +3 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +1 +3 +2 +1 +3 +2 +1 +2 +1 +2 +2 +3 +2 +3 +2 +2 +2 +2 +1 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +3 +3 +3 +2 +1 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +3 +2 +3 +2 +2 +2 +1 +3 +1 +3 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +2 +1 +2 +3 +2 +2 +1 +2 +2 +1 +3 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +3 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +1 +2 +2 +1 +2 +2 +3 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +1 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +1 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +1 +1 +2 +2 +2 +3 +3 +2 +3 +2 +1 +2 +3 +2 +1 +2 +2 +1 +2 +1 +1 +3 +3 +2 +2 +2 +2 +2 +2 +1 +3 +2 +3 +2 +3 +3 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +3 +1 +1 +3 +2 +2 +3 +2 +2 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +3 +2 +1 +3 +2 +1 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +3 +1 +3 +1 +3 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +1 +2 +2 +1 +2 +2 +3 +3 +2 +2 +2 +2 +2 +3 +2 +1 +1 +2 +3 +3 +1 +1 +3 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +1 +3 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +1 +2 +2 +1 +2 +2 +2 +2 +1 +2 +3 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +1 +3 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +3 +1 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +3 +1 +2 +2 +2 +2 +3 +1 +2 +3 +2 +2 +2 +2 +2 +3 +1 +2 +1 +2 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +1 +2 +2 +1 +2 +3 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +3 +2 +2 +3 +1 +2 +3 +2 +3 +2 +2 +3 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +3 +1 +2 +2 +1 +2 +2 +3 +2 +1 +3 +2 +2 +3 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +3 +2 +1 +1 +2 +2 +1 +3 +3 +2 +2 +2 +2 +3 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +3 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +2 +2 +3 +1 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +3 +2 +1 +2 +3 +2 +2 +2 +3 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +3 +3 +2 +3 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +1 +3 +2 +2 +3 +2 +3 +2 +2 +2 +3 +3 +3 +1 +2 +2 +2 
+2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +1 +1 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +1 +2 +1 +2 +3 +2 +2 +2 +3 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +2 +1 +2 +2 +3 +2 +2 +1 +1 +3 +2 +3 +1 +2 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +3 +3 +3 +2 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +3 +3 +2 +2 +2 +1 +2 +1 +1 +2 +2 +2 +3 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +3 +2 +2 +3 +1 +2 +2 +3 +2 +3 +2 +2 +2 +3 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +1 +2 +2 +2 +1 +3 +2 +3 +3 +2 +2 +2 +3 +1 +3 +3 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +3 +2 +3 +3 +2 +2 +1 +1 +2 +2 +3 +3 +2 +1 +1 +1 +3 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +3 +2 +3 +1 +3 diff --git a/inst/dev/datasets/cec/mouse_1_spherical/dimension.txt b/inst/dev/datasets/cec/mouse_1_spherical/dimension.txt new file mode 100644 index 00000000..d8263ee9 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/dimension.txt @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_spherical/energy.txt b/inst/dev/datasets/cec/mouse_1_spherical/energy.txt new file mode 100644 index 00000000..f1ff1e78 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/energy.txt @@ -0,0 +1 @@ +3.244514 diff --git a/inst/dev/datasets/cec/mouse_1_spherical/input.txt b/inst/dev/datasets/cec/mouse_1_spherical/input.txt new file mode 100644 index 00000000..ec0908ad --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/input.txt @@ -0,0 +1,3000 @@ +1.61209664866328 -0.413233458995819 +1.7419871352613 0.422129414975643 +2.60966323688626 2.46355983987451 +0.568149335682392 0.856650270521641 +0.851572871208191 -0.572510071098804 +-1.7647741548717 2.44094736874104 +0.0541942827403545 1.43476314470172 +-0.930218894034624 0.573868732899427 +-1.6590390317142 -0.3955920599401 +1.34443809464574 2.4018629565835 +-1.95066077634692 1.31018867716193 +-1.99102317169309 2.0375639796257 +-1.87702622264624 0.490582834929228 +1.18097758665681 2.49035001918674 +1.75239836424589 1.01476220041513 +-1.3817829452455 -0.516242548823357 +2.34617390856147 1.08066613599658 +-0.212645802646875 -0.185363076627254 +-1.002137940377 2.07395137101412 +1.96139129623771 1.15754506364465 +-1.17359426617622 -0.0368270874023438 +1.59891119971871 -1.13823710381985 +-1.92849291115999 2.71003712341189 +1.03600108250976 -1.59709620848298 +1.58836380019784 -0.717748995870352 +-2.6994837410748 1.43561523407698 +0.704492211341858 -1.67905734106898 +1.16078396886587 2.09976300224662 +0.32393553853035 0.34012009203434 +-1.48058445751667 -0.862582374364138 +0.527839832007885 1.35229524224997 +-2.25163902342319 2.51089239120483 +2.7304120734334 2.23758437484503 +-2.0815456584096 2.57961105182767 +2.00750983133912 1.18487352877855 +-0.0966376960277557 0.740540828555822 +1.93578931689262 -0.392697624862194 +-0.0973851718008518 -1.6135998480022 +1.50237447395921 -0.545705925673246 +-0.0956599004566669 0.0203249007463455 +0.606842920184135 0.921810775995255 +-1.2021614536643 1.63980568572879 +-2.47400256618857 2.00858150050044 +0.700028248131275 0.372448820620775 +-2.44264267757535 2.41181446239352 +-1.99176017567515 1.7488059848547 +0.147386211901903 -0.743320502340794 +0.37871852144599 -0.961705703288317 +-0.478621114045382 1.40059468522668 +-0.0425610728561878 -0.853261288255453 
+2.53069310635328 1.59227794408798 +-1.87901111319661 -0.358152363449335 +0.954459019005299 1.39409958571196 +0.324125435203314 -1.40645006299019 +-0.395102594047785 -0.578349638730288 +0.714126072824001 1.09385222941637 +2.26894120872021 2.86285768821836 +-0.525708746165037 1.92314795404673 +-0.439350225031376 -1.20656536519527 +1.85878024995327 0.408548388630152 +-1.34356329217553 -1.16515595465899 +1.88550673425198 1.81114589050412 +2.05061941593885 1.17919966578484 +0.859156642109156 -1.40385042130947 +1.74830450490117 -0.508347071707249 +-1.49666209518909 -1.10139701142907 +1.79499476775527 0.419189963489771 +0.00937820971012115 1.21121746301651 +-1.62085117399693 -0.471252281218767 +1.84430212154984 -0.222130320966244 +0.313748493790627 0.978976618498564 +2.5580178797245 2.22202425822616 +0.149811711162329 1.75097593292594 +0.100270200520754 1.13340963423252 +1.23176250979304 -0.748868733644485 +-0.873373385518789 0.285578969866037 +0.214500464498997 1.89648038521409 +1.5453750230372 1.45198041573167 +-1.32176746800542 1.87009589001536 +-0.216902684420347 1.42689475789666 +2.03494207933545 2.21144592016935 +-0.135794829577208 -1.17052250355482 +1.49550383910537 -0.500416904687881 +-0.989499978721142 -0.840314164757729 +1.12359218671918 1.31851376593113 +-1.38978365808725 0.76328818872571 +2.23130672425032 2.63739766553044 +-1.8116545714438 1.19852881133556 +0.630826715379953 1.13814905285835 +1.69685072079301 2.77633919939399 +-0.464271951466799 -0.822133090347052 +0.958454590290785 -0.427048087120056 +1.24529281631112 0.38800411298871 +2.15577581897378 2.38636680319905 +1.31874823570251 -0.872197724878788 +1.61186712607741 -0.760608464479446 +1.50582986697555 -0.649233177304268 +-1.16123018413782 0.353783454746008 +2.92900170758367 1.94925379753113 +-0.167192131280899 0.43928287178278 +0.889743082225323 1.00621205568314 +0.210233103483915 -0.997596062719822 +1.74519320204854 -0.487294547259808 +-0.679500922560692 1.38434460759163 +-0.146491166204214 0.196907259523869 +1.36268999800086 -0.405691497027874 +-0.753944084048271 -0.0923365391790867 +-1.32451192289591 1.50311958044767 +-1.73169532418251 -0.535160902887583 +0.183907847851515 -1.82662163302302 +1.48520020022988 0.683072898536921 +1.81297934800386 -0.825113583356142 +2.80013621598482 2.39241858571768 +0.851973999291658 1.1710419356823 +2.88015890866518 1.95329306647182 +1.37963550537825 1.72378801181912 +-1.74487737566233 -0.021989319473505 +0.923321183770895 -0.505416136234999 +0.35810911282897 0.171648122370243 +-0.611732892692089 0.628997180610895 +-2.66885285079479 2.43093906342983 +-0.77232750505209 1.796799544245 +1.30631361156702 -0.335120588541031 +0.499342810362577 -1.62093053385615 +0.443861253559589 0.0536024756729603 +-0.770540583878756 1.04491452872753 +0.208602167665958 1.19714096188545 +2.69211648032069 1.44047920405865 +1.42747324332595 -0.774205319583416 +-0.802357636392117 -0.888977598398924 +-0.551691222935915 0.127152539789677 +-0.388330698013306 -1.09207228943706 +2.16791117191315 2.24981886520982 +0.193644858896732 0.806783203035593 +1.35509423911572 -0.441257961094379 +1.19813763350248 0.00904372334480286 +1.21165931224823 1.86079402267933 +1.26811403036118 1.40932361409068 +-1.9056026302278 1.05506280437112 +1.09377502650023 2.38781935349107 +-1.96020850539207 1.41531054675579 +0.663487669080496 -0.665306996554136 +-1.8456757850945 -0.0194409415125847 +-1.91931500285864 -0.0470988340675831 +-0.637594405561686 -1.69683813303709 +-1.46911622956395 -0.256256550550461 +-2.82381584122777 1.77615419775248 
+-0.724260363727808 1.8149929754436 +1.09889047965407 2.37745362147689 +-1.4723867662251 2.72274923697114 +1.9817033149302 0.0610696449875832 +2.43553154170513 1.55298114940524 +-0.692302133888006 1.19863210618496 +-0.206275515258312 0.0856613777577877 +-0.225796446204185 0.892047345638275 +0.942115299403667 -1.5204333178699 +-1.93550775572658 -0.353068083524704 +0.654838521033525 -0.0228586420416832 +0.718102071434259 1.20882242545485 +0.749999020248652 0.618096154183149 +-1.3312373906374 2.67259626463056 +-0.58808583766222 -1.80532194674015 +1.285110745579 2.61369458585978 +-2.30057621747255 1.30949757993221 +-0.214167837053537 0.125181913375854 +1.41330854967237 -0.164014380425215 +2.61858379468322 2.53823513165116 +-1.82938589528203 2.5589129589498 +-2.8707655146718 1.54337161034346 +1.16227375343442 0.928168889135122 +-1.06591911241412 2.03961246833205 +1.6409085765481 1.83075737208128 +-0.107716578990221 -0.551954045891762 +-1.59457765892148 1.63932278379798 +-2.16795053705573 2.07921721413732 +-1.4361627548933 1.40881815180182 +-0.750233829021454 1.23842242732644 +1.8784948810935 1.97354388609529 +2.69149800017476 1.89659919962287 +0.779429562389851 -0.347004380077124 +-1.70491071417928 -1.00703772902489 +-0.83432724326849 -0.365319933742285 +-1.71521957591176 -0.967166464775801 +2.17667028680444 1.2198950573802 +0.870618931949139 1.54036268219352 +0.251090154051781 -0.132912382483482 +1.5877863317728 0.678796026855707 +0.785636354237795 -0.224852934479713 +1.36263806372881 -1.2199660949409 +1.36459283530712 2.50574555248022 +-1.01252736151218 1.64294324070215 +1.14787768572569 -0.823890078812838 +2.596291590482 1.4821464009583 +-0.146839492022991 1.85857572779059 +-0.957799948751926 1.01166198775172 +0.84897394105792 -1.58480857685208 +0.529773037880659 -1.73885920271277 +1.59365494921803 -0.93388469144702 +-2.55761077255011 2.52492552250624 +-1.00085066631436 1.2092454507947 +0.742211189121008 -1.14398978278041 +-0.328856363892555 -1.55320810899138 +-1.02509764581919 1.52784418314695 +-2.05574489012361 1.35704055801034 +0.81254443898797 -0.0315317176282406 +-0.257675908505917 -0.178346656262875 +-0.67271126434207 0.731910966336727 +1.37309474125504 1.71898373961449 +-0.906342931091785 -0.848964512348175 +-1.0287752635777 -1.34678086638451 +-2.25897191092372 1.44886104017496 +-0.598325457423925 -0.0408293828368187 +2.32446739077568 1.48157031461596 +-1.4871674887836 -0.864731453359127 +0.442404374480247 0.84542427957058 +-2.90436279401183 2.09229874610901 +0.304890420287848 -1.81511974334717 +1.88274436071515 2.18269338458776 +-0.920551784336567 -0.365165416151285 +1.14236346632242 -1.29492803290486 +1.04628176987171 -0.545630246400833 +0.399722006171942 1.84942538663745 +-1.00721666589379 0.163407508283854 +1.40371734276414 -0.268044795840979 +-0.414731193333864 1.27007945254445 +-1.54506851732731 0.555353846400976 +-0.104042284190655 1.70410278439522 +-1.30641648173332 2.18044982478023 +1.24231900274754 -0.37263298407197 +-0.231576219201088 -0.893185991793871 +-0.78636497631669 1.51184230297804 +-0.231641631573439 1.19101314991713 +-0.936298809945583 -0.838305097073317 +0.78686735406518 0.062991090118885 +0.180524613708258 -0.651404362171888 +-2.19469415023923 1.09565394371748 +0.788988001644611 0.933427304029465 +0.291592765599489 0.749692879617214 +0.167921397835016 0.676627937704325 +-0.612701941281557 1.50024525821209 +-0.134262174367905 -0.196316670626402 +0.133057925850153 -0.417828556150198 +1.250812407583 0.748687732964754 +1.91872930154204 2.65982663631439 
+1.78028824180365 -0.673218831419945 +0.571906767785549 -0.541913568973541 +1.77438765764236 1.98131423071027 +0.662845615297556 1.18991031870246 +-0.909465260803699 1.0986944437027 +-1.13336053863168 -1.49439633265138 +1.5435923486948 2.85983864963055 +0.0603081919252872 0.75813976302743 +-2.15522218123078 2.13374704867601 +-0.27484630048275 1.24552789330482 +-1.58977943658829 -0.0374957323074341 +2.77542039752007 1.71519975736737 +1.64606515318155 2.12922851368785 +0.886503338813782 -1.78284951299429 +2.72952016070485 2.00538262724876 +0.843258656561375 -1.2337305136025 +-2.65595661103725 2.33931179717183 +1.48117453232408 1.22620160877705 +1.72258114442229 1.51937655359507 +1.68386142700911 -1.05569622293115 +1.46284241974354 -0.705050360411406 +-0.588659510016441 1.4270611256361 +1.96614512428641 1.98830921202898 +0.734626423567533 0.402017414569855 +0.915748301893473 -0.504114560782909 +1.42900901660323 1.66078344359994 +2.25488945096731 2.90158635750413 +0.955485995858908 -1.75332788750529 +-2.88088296353817 2.26876648887992 +2.13912281394005 2.52278220653534 +0.287582330405712 0.00384973362088203 +-0.461033016443253 -0.432767115533352 +2.81660261377692 2.27390728145838 +1.49667909368873 -0.48365531489253 +-2.29053501039743 2.48293719813228 +-1.11716188490391 0.289610158652067 +0.333971489220858 0.0444956198334694 +-1.3790140375495 -0.727075684815645 +-0.796113330870867 0.967301711440086 +0.366976842284203 -1.64739779010415 +-1.28612922504544 -1.14733279496431 +0.37472839653492 -0.901894651353359 +-0.975736565887928 1.17933520674706 +0.605364181101322 -1.61345959454775 +-2.73813807219267 1.7968438602984 +1.57255982980132 1.24518446996808 +1.779820676893 1.28376130759716 +0.285859815776348 0.691839635372162 +-0.910921044647694 -0.594001829624176 +-0.823106937110424 -1.05418333038688 +0.104728970676661 1.74921382591128 +-2.38141652569175 1.12706491351128 +-0.83786728233099 -0.175873752683401 +2.11039216071367 1.83155750855803 +-1.99030840024352 1.51974556222558 +2.68357738479972 2.48060549423099 +-1.1845739595592 0.326227597892284 +-1.7077529579401 2.62458549067378 +-1.23910231888294 1.22679214179516 +-0.124209456145763 -0.468802977353334 +-1.1875561773777 1.26572915911674 +-1.60731089115143 -0.422453884035349 +-1.14812042564154 -1.17296344786882 +-0.884335368871689 -1.00313790887594 +0.723984904587269 0.207578588277102 +-0.0211784578859806 0.759385578334332 +-2.3401742130518 2.87697842344642 +0.482478722929955 0.578970745205879 +-2.80467076599598 2.5091815367341 +-0.152245610952377 -1.98909625038505 +-0.976488165557384 0.765861105173826 +0.358756449073553 1.77729746699333 +1.40397672355175 -1.33897558227181 +-2.73336103186011 1.93623175472021 +1.33247448503971 -0.801424868404865 +-0.771697003394365 -0.867804300040007 +-1.1027612388134 1.39652150124311 +-0.209395322948694 -1.08475765585899 +-1.9979960732162 1.9581238925457 +2.07440412417054 1.3293282687664 +-1.93831306695938 -0.0201470404863358 +1.4558743275702 0.454823911190033 +0.999020967632532 -0.583084352314472 +-1.10364705696702 -0.968060072511435 +-2.12873597070575 2.42020189762115 +-0.806119713932276 -0.558744192123413 +-1.40649508312345 2.07905009388924 +-1.57709585130215 1.44691667705774 +-1.69501673802733 0.722719877958298 +1.12624054402113 0.0203857608139515 +-0.106229163706303 1.46423427760601 +1.66349943727255 0.0891099311411381 +1.0056647323072 -0.449528533965349 +1.17581545561552 2.35557828843594 +2.85731463879347 1.61511135846376 +0.0460674688220024 0.993230998516083 +-2.00231520831585 1.78404529392719 
+-0.857668086886406 -0.603680722415447 +0.212190877646208 -0.0847933255136013 +2.08787521347404 2.40160214528441 +-1.48174329474568 -0.511398892849684 +-1.23005585744977 -1.15711344406009 +1.60670629888773 0.144752860069275 +0.611652843654156 1.09040654450655 +0.0687106512486935 1.57068181037903 +0.973474074155092 1.72988789156079 +2.52547753229737 2.24037718400359 +2.12066468223929 2.70268177241087 +-0.0484584085643291 1.39731380343437 +-1.61877960339189 2.90682883933187 +-1.7491229660809 -0.778733663260937 +0.774491168558598 1.33957181870937 +0.114488948136568 1.42440451681614 +-0.76437946408987 -1.79316079989076 +-2.02736221253872 2.26537892222404 +-1.36571399122477 1.04741927608848 +2.26948389038444 2.52961926162243 +-0.806051567196846 1.47987924143672 +-1.54564606770873 0.673107825219631 +1.90441290289164 -0.549355231225491 +-0.323343329131603 1.58480306342244 +0.51525042206049 0.346732381731272 +2.17656968906522 1.03988980129361 +-1.04702944308519 0.146956086158752 +-2.84862881526351 1.64181040972471 +-1.1561822630465 -1.44025735929608 +0.384719599038363 -1.64739570766687 +-1.9167810715735 1.0153863504529 +-0.764548536390066 1.81304601207376 +-0.0982047691941261 1.813994217664 +2.52984330430627 1.34231220185757 +2.89486844837666 1.95676428452134 +2.88041764870286 1.5644040517509 +0.254188776016235 1.51305732503533 +1.2805294804275 0.120650105178356 +-1.69608326628804 0.0137143023312092 +1.42758256942034 1.96696586161852 +1.33042541518807 -1.17135084047914 +-1.8801090605557 0.5263367369771 +1.13387015461922 0.820474114269018 +-1.07915004715323 1.01671554148197 +1.48597346618772 1.63048772886395 +1.50408792868257 1.54940662905574 +-2.4542132653296 1.69421143084764 +1.38832054287195 1.14387844875455 +1.75520042330027 2.04135481268167 +-1.17672529816628 1.87263363227248 +-0.01735720038414 -0.132140222936869 +-1.61478655412793 1.87356888130307 +-0.309962596744299 0.660954315215349 +2.20934700965881 2.22116094082594 +-2.46272791922092 2.46215682476759 +1.80619774758816 2.01026545464993 +-1.43536208942533 0.606022253632545 +-1.73742507025599 -0.508309822529554 +-2.0013825558126 2.3458355255425 +0.533945962786674 -0.452988084405661 +1.28323761001229 -0.608238104730844 +-0.891486573964357 1.12702103704214 +1.51443880796432 0.38937396556139 +-0.929268095642328 -0.225581336766481 +1.33460436016321 1.53977439180017 +-1.91680308431387 1.21642768755555 +-2.64733371511102 2.6652738712728 +0.0621351078152657 1.98383947089314 +-1.58726055920124 0.241737522184849 +-0.011457908898592 1.35303677991033 +1.57809987291694 2.18806220591068 +-1.54139747470617 -1.25315822288394 +1.13442904874682 -0.272605936974287 +-0.516586482524872 -1.81004797667265 +-0.442781109362841 -0.075520571321249 +-1.17415972426534 -1.22637978568673 +0.491233013570309 0.812948312610388 +-0.267092142254114 -0.408651396632195 +2.63292656093836 1.59315424412489 +-1.10870075598359 -1.34610893949866 +-2.58903631940484 1.85086574405432 +-0.0182708576321602 1.2258055023849 +-1.46989291906357 -0.482244189828634 +-0.106721688061953 0.604912262409925 +-1.18913446739316 -0.42220875993371 +1.2769967019558 1.1977108977735 +0.362223550677299 -1.7522339001298 +0.984295953065157 -0.0474527031183243 +-1.33436021208763 0.215641874819994 +-0.699580814689398 0.362101767212152 +2.78464812785387 1.91815891861916 +0.228782624006271 -1.72433909401298 +-1.81500281766057 0.806421231478453 +-1.07556383311749 -0.284285016357899 +-1.56682952865958 0.280237857252359 +-0.977108031511307 0.270161170512438 +-0.167716830968857 -0.971038773655891 +-2.34170935675502 
[diff continues: remainder of an added test dataset — roughly 2,000 lines of two-column numeric data (2-D point coordinates), omitted here for readability]
0.593575734645128 +1.44651893153787 0.682130258530378 +0.907365724444389 1.62373670190573 +1.29709115996957 0.930207539349794 +-1.67135568335652 1.51227409765124 +-2.41698196530342 1.96667265146971 +1.06512136012316 0.518598642200232 +-1.78652555495501 1.06272687017918 +2.17286586016417 2.62133271619678 +-0.13370419293642 0.73027215898037 +-0.695973422378302 0.132425963878632 +1.70408631488681 1.97604199871421 +-1.34619582816958 1.08122764527798 +0.635242152959108 -0.199861787259579 +-1.10192470252514 2.05480550974607 +-0.94717800989747 -0.630246590822935 +1.70300518348813 1.55034571886063 +-2.23884864524007 1.66524270921946 +0.516748148947954 -0.628420948982239 +-0.182907346636057 -0.0645793564617634 +-1.48728507384658 1.28508923202753 +-1.33415547758341 -0.553155098110437 +1.10575541481376 0.989736843854189 +-0.67467175796628 -1.80816911533475 +1.66922806575894 2.65700840950012 +-1.18470480665565 1.74626357108355 +1.24073842167854 -0.374344494193792 +-0.451939143240452 1.71041077747941 +1.64350361004472 -0.187231339514256 +0.586312063038349 -1.28227543085814 +0.752832692116499 1.52826872095466 +1.9040132984519 -0.384332694113255 +0.547763612121344 -0.21344643458724 +-0.470666497945786 -1.21811608970165 +0.76456231251359 -0.415471844375134 +1.30486880242825 0.959521226584911 +0.425816066563129 -0.82542372494936 +-0.859390750527382 -0.779338870197535 +-1.74391374737024 1.65754588320851 +-0.119972255080938 0.741616319864988 +0.506005618721247 0.609428849071264 +-1.1955440826714 2.49096083641052 +-1.19979045912623 -0.0102192126214504 +-0.0651886761188507 -0.99269525334239 +1.27262368798256 -0.254643421620131 +-0.450716532766819 1.62174102291465 +-2.5569444745779 1.6445697247982 +-1.39219756796956 -1.29068395867944 +2.6287630200386 2.14241267368197 +1.91449949145317 2.79996054247022 +1.15883900225163 0.00188637524843216 +0.982614494860172 -0.872258208692074 +1.41929485276341 1.45608067512512 +-2.48507366701961 2.26417628675699 +-2.59149982407689 2.65095861256123 +1.05873238667846 -1.66309567540884 +-1.44110786914825 -0.89272603392601 +1.42776192724705 1.21658572182059 +1.86667015776038 0.478294964879751 +-2.29095414280891 2.70314594730735 +1.03569241240621 1.08555588126183 +2.37809075787663 2.63908607512712 +-0.636470831930637 -1.30283078178763 +0.575023286044598 -0.083458948880434 +-2.95411188527942 1.99973373115063 +1.66172904521227 0.721444431692362 +0.874346606433392 -0.320394761860371 +0.914382807910442 1.72320305556059 +-1.54675411432981 0.810082443058491 +-0.0306556299328804 0.810736399143934 +-0.778492107987404 1.00261883437634 +1.94767145067453 2.39114357531071 +0.12454404681921 1.47197991237044 +0.640425965189934 -0.915675807744265 +1.30756982415915 0.257875148206949 +0.463131565600634 -0.510185789316893 +-1.94290655106306 0.390200689435005 +0.861060719937086 1.69475064426661 +-0.596020724624395 -1.31833404675126 +-0.967709474265575 -1.41061119735241 +1.86148541420698 2.1914074011147 +1.33700554817915 -0.494080230593681 +1.9155209325254 1.17085805907845 +1.5216115154326 -0.656151257455349 +-0.988004796206951 -1.05279023945332 +-1.79974020645022 1.52944333478808 +-1.58436769247055 -0.293444685637951 +-0.379862658679485 -0.562306210398674 +1.54062598571181 -0.585036925971508 +0.54957078769803 0.0526377521455288 +-1.90664768218994 2.79625792056322 +-2.53612166643143 2.13662191852927 +1.25708016380668 -0.423956073820591 +-0.268237039446831 1.23717963322997 +1.50826417654753 -1.21894085779786 +-1.37412555888295 0.956090498715639 +-1.70549262315035 1.26755261048675 +-0.95484346523881 
-0.550038985908031 +-1.07965064421296 -0.615462113171816 +-2.45840615779161 2.29837606847286 +2.36267644166946 2.34142276272178 +-1.44061214104295 1.3544699177146 +0.991551131010056 0.192100442945957 +-0.491512577980757 0.23652096837759 +-1.38528561964631 -0.634260300546885 +1.32833851501346 -1.16645000502467 +-0.0383889861404896 -1.9851943179965 +-0.183512911200523 1.44900576025248 +-1.31814044713974 -0.668087001889944 +-2.38698591664433 2.664770077914 +-0.0702574104070663 1.05192149430513 +-1.50116031616926 2.51582760363817 +0.116251889616251 0.859274040907621 +-0.589289251714945 -1.60551861673594 +0.259310230612755 0.956516105681658 +0.408254042267799 0.605765525251627 +-0.353813420981169 0.925535842776299 +2.21094064414501 2.53124418109655 +0.00580412149429321 1.07951683923602 +-0.128202505409718 -1.35972686484456 +-0.0835149474442005 -1.40766746178269 +-0.685722127556801 0.633070666342974 +-0.228976972401142 0.716798476874828 +-0.314142514020205 0.87760741636157 +-1.63135157898068 2.59023092314601 +0.48067232593894 -1.92003295198083 +-0.839801024645567 -0.365892615169287 +0.294471867382526 0.42062134668231 +-2.51683387905359 1.88696189597249 +-0.419275209307671 0.559270903468132 +1.75273936614394 -0.429852154105902 +-0.166549928486347 -1.42955809831619 +-0.493462979793549 -1.54558394104242 +-1.70016938447952 1.3782924413681 +0.0676529332995415 -0.203526772558689 +-0.638028036803007 0.0358610637485981 +-2.8336612097919 1.52146966010332 +-0.779725268483162 0.905260171741247 +1.51560585945845 2.78766296803951 +-0.722736071795225 0.352756779640913 +-1.84800857305527 1.27182929590344 +0.982172276824713 0.501089427620173 +0.307786900550127 0.148968111723661 +1.71013582125306 -0.303893703967333 +-2.0330076366663 1.83816499635577 +0.706926513463259 1.39347774535418 +1.27343387529254 2.10188510268927 +-0.0797674246132374 0.0670081079006195 +-1.29521260783076 2.1950269639492 +0.192932900041342 0.0572373494505882 +0.231393404304981 0.269573837518692 +-0.278286520391703 -0.699632372707129 +0.958856523036957 -0.246140528470278 +0.431317817419767 1.9147194288671 +1.23021182790399 1.18564041703939 +0.451975125819445 0.0850551538169384 +1.66893910616636 2.26893594488502 +-1.73326994478703 1.62885748967528 +-1.69280558824539 1.34039432182908 +0.169915474951267 0.931300632655621 +-1.96734732761979 1.43532429635525 +-1.41516100242734 1.56895295530558 +-1.38352558389306 1.20329409092665 +1.93590002506971 -0.462315123528242 +1.62447164952755 -0.468720607459545 +1.35286011919379 -1.30981515720487 +-0.57337437197566 -1.43897854164243 +-0.752261783927679 -0.412747032940388 +0.460095845162868 1.23995196446776 +-1.2392801977694 0.51284246891737 +-0.41037392988801 0.768059007823467 +-0.783293485641479 1.80975548923016 +1.55770405009389 0.696090243756771 +-2.15515127778053 2.26856860890985 +-1.69504843652248 -0.283982966095209 +-1.57191829383373 2.63137193769217 +0.560173291712999 -1.2617424428463 +-1.6279844045639 -0.617061264812946 +-0.680581245571375 1.0983675122261 +2.03615063801408 1.00735822319984 +-1.73675161227584 1.33363227918744 +-1.86319487914443 0.156173717230558 +-1.24781316518784 0.947194576263428 +-0.391869205981493 1.37865060195327 +0.136743288487196 -1.90284360945225 +0.999140165746212 1.66502907127142 +-1.34718973189592 1.69857199862599 +0.797701835632324 -1.81586116552353 +-0.0178047902882099 1.26013637706637 +0.871120415627956 -0.644829604774714 +0.224457394331694 1.51280977204442 +0.859597332775593 0.919819686561823 +-2.82274105399847 1.71816676482558 +-1.53467748686671 0.467698305845261 
+1.55112740024924 -0.47682711482048 +-0.203193809837103 1.10159240290523 +1.76938148960471 2.16226059570909 +-2.65340813621879 1.31114920973778 +-0.756886769086123 -1.03708986192942 +0.238795887678862 0.788461770862341 +-2.00365307927132 2.77997479587793 +-1.78383065760136 0.0650591738522053 +-1.64814525097609 1.73535230383277 +0.461335901170969 -0.95270873233676 +-0.163613315671682 -0.503504827618599 +-1.04729107394814 -0.477535285055637 +-1.81674943119287 1.50311726331711 +-2.63541975989938 2.63445358350873 +-1.67418662458658 2.61837893351912 +1.36899954080582 1.87464229762554 +-0.178647540509701 -1.80277233570814 +0.85903437808156 -0.224506821483374 +-0.435678482055664 1.03995686024427 +-0.138757973909378 -1.00327798724174 +0.348497241735458 -1.73964801430702 +-0.262425389140844 -1.08806496486068 +-1.78051641210914 0.47169267386198 +0.039889931678772 1.75439198687673 +0.514491584151983 -0.682184107601643 +0.811419419944286 -1.64077165350318 +-1.63327368348837 1.09899342805147 +0.0793292708694935 -1.99415926262736 +0.695150371640921 -1.60071619600058 +0.215909935534 1.71501702442765 +-1.62622308731079 2.52008878812194 +-1.74339772388339 0.199343893676996 +0.221649516373873 0.829091165214777 +0.113631065934896 -1.19130916520953 +1.46434355527163 -0.552903942763805 +0.939274299889803 0.933544926345348 +-2.26094883680344 1.27482405304909 +2.4843735396862 2.12675891816616 +2.54108319431543 2.640057310462 +-2.59099861979485 2.66347276046872 +1.87720248475671 0.614995770156384 +0.979909002780914 0.800120614469051 +1.2316859215498 -0.957638815045357 +1.25174754485488 2.22501616179943 +0.827509246766567 0.923771508038044 +-0.457216303795576 -1.36238705739379 +1.92662385478616 0.158460460603237 +0.370570465922356 1.59593136236072 +-1.3883591145277 -1.29616727679968 +0.485752776265144 -0.274427317082882 +-0.198016218841076 -1.36465971544385 +-0.647671967744827 -1.31787549704313 +1.55457253754139 -1.07191491127014 +0.576873153448105 0.523816529661417 +-0.290570918470621 0.00647160038352013 +0.288516711443663 -1.94957103952765 +-0.163418773561716 0.0187129341065884 +-2.21634349599481 1.93698082864285 +1.83702322840691 0.696440611034632 +1.47630554437637 0.346505317837 +-0.151858769357204 -0.101670775562525 +-0.61121342331171 -0.0884107388556004 +-1.56626315042377 1.30609018728137 +0.993360459804535 1.04189617931843 +-0.55113472789526 1.01046715304255 +1.93856098130345 1.03804461658001 +0.576242677867413 -1.89826102554798 +-0.153248734772205 1.15408905223012 +-1.38945835828781 0.839608117938042 +2.80443396046758 1.7546565271914 +0.400887615978718 1.3712489195168 +1.88074469193816 1.25738567858934 +-1.17537642270327 1.13607070967555 +-1.44263596832752 1.65536530315876 +-1.32891277223825 -0.0720656178891659 +-1.13314041122794 1.2361895032227 +-1.18639327213168 0.619502812623978 +-2.35144691541791 2.88997457921505 +-1.26051409170032 -0.68239164724946 +1.90215446427464 0.508575521409512 +2.16122414916754 2.65289481729269 +-2.59469514712691 1.48937852680683 +0.321152102202177 0.543348021805286 +1.50541293248534 -0.106069039553404 +0.758748143911362 0.826878488063812 +1.12901883199811 0.853483460843563 +0.798352342098951 -1.24532177299261 +-1.43217619135976 -0.836231026798487 +0.452941067516804 0.230222824960947 +-1.24927673488855 -1.2335900105536 +-0.0330780185759068 1.10919252410531 +-0.123534847050905 -1.05524901673198 +-0.993950948119164 1.33412408828735 +1.38233162090182 2.54086866602302 +-1.63140538334846 2.82513345405459 +-1.92349470034242 0.141484968364239 +1.30741706863046 0.266764253377914 
+2.93539988622069 1.76241579651833 +-1.29918939992785 -1.20068853721023 +-0.580944359302521 -0.760914769023657 +-0.145981598645449 -0.341704532504082 +0.823990140110254 0.244071137160063 +1.37918548285961 -0.153212416917086 +-1.19075786694884 -0.0831953920423985 +0.220914229750633 -0.0651553757488728 +-0.552572384476662 0.543335869908333 +1.57127697393298 0.449144970625639 +0.0944934375584126 -1.60683101788163 +-1.38165763393044 1.61453429237008 +0.0320120491087437 -1.21238803863525 +-2.61676991730928 1.6692368760705 +0.568882815539837 -0.750609058886766 +2.7475122846663 2.31340386345983 +-0.631636992096901 -0.869823697954416 +-1.02035494893789 1.23426667973399 +-2.30327251553535 1.07606317847967 +1.2536935955286 -1.02421547472477 +-0.0205694772303104 0.201753720641136 +1.98304214328527 2.87250499799848 +1.72979063540697 1.22986351326108 +-1.5397043004632 2.73696993291378 +0.0236088894307613 -0.444173123687506 +-1.87184008955956 2.14831002801657 +1.48865288868546 2.61171882227063 +-0.00229827687144279 -0.0606071501970291 +0.251513376832008 -0.213689349591732 +0.625349216163158 -0.363451439887285 +0.703389417380095 -0.961676374077797 +-1.08963143825531 2.39075188711286 +1.65403419360518 1.81590060889721 +0.0498038753867149 0.609751727432013 +-0.911196287721395 0.450370956212282 +-1.27375311776996 -0.626686055213213 +0.170301612466574 -0.0526948645710945 +-0.473431710153818 -1.73027384281158 +2.67757051065564 1.46168432384729 +0.918323513120413 -0.682236436754465 +1.01495398581028 -0.343121137470007 +2.65361326560378 1.51973400264978 +0.577519971877337 -0.250002410262823 +-2.5977134257555 2.3665869012475 +-1.50275629013777 1.82382469251752 +-1.27371744066477 1.90497035160661 +1.15311259776354 -0.835548166185617 +0.91357284411788 -0.424510281533003 +0.248583793640137 -0.479910355061293 +1.04244128242135 0.964618571102619 +0.32025945559144 -1.9448094367981 +-1.35571758821607 1.81048074737191 +-1.13960805535316 -1.37360360473394 +0.83403517305851 1.4062983840704 +-1.78757210075855 2.91149929538369 +-1.43691016733646 0.197713892906904 +-0.691463831812143 -1.74857584387064 +-1.45266249775887 -0.515165153890848 +0.884010519832373 -0.371119938790798 +-2.58516504988074 1.61748562753201 +-0.784489590674639 0.345853377133608 +-1.33161356672645 -0.903216425329447 +2.32880504056811 2.29091412946582 +1.80405613034964 0.389507248997688 +-1.46485745161772 -0.751125585287809 +-2.74127555638552 1.59760411083698 +-2.08028296753764 1.62259260565042 +1.56542291119695 -1.15426879003644 +0.492393050342798 0.999535616487265 +1.00796457380056 0.457884706556797 +2.28164030611515 1.57319641858339 +-1.31747916713357 0.0246546342968941 +1.59498060122132 2.73983108624816 +2.09613207355142 2.91043724492192 +-0.0928696244955063 0.590001679956913 +0.602534610778093 0.686082504689693 +-1.26303784176707 1.27988543733954 +-1.38072663545609 1.92611268535256 +0.757522255182266 1.67052263021469 +0.413585178554058 0.980075154453516 +-0.144942931830883 1.9943518564105 +-1.14773716777563 1.37818126380444 +1.72325848788023 2.52823302894831 +-2.34290808439255 2.73114396631718 +-0.179001700133085 -0.0550750643014908 +-0.41447664052248 -0.554839044809341 +-0.0129327736794949 -1.59723911061883 +1.39581148326397 0.574153542518616 +-0.935020532459021 -1.30064840242267 +1.76168300956488 2.23809352517128 +-0.795723930001259 -0.957099143415689 +-0.99784841388464 0.837770022451878 +-0.209455534815788 -1.06291905790567 +-0.225433152168989 -0.259807176887989 +-0.301143910735846 0.0607243403792381 +-1.27852840349078 2.62999890372157 
+-1.70671638846397 0.301549430936575 +-1.61815506592393 -0.235432423651218 +1.50110031664371 1.54744931682944 +-0.594201270490885 -0.864518139511347 +0.972399964928627 1.13791256025434 +-0.748130522668362 -1.0156760700047 +-1.41649935394526 1.54398607462645 +0.114728171378374 0.319723188877106 +-0.853777911514044 0.755842469632626 +-2.08339603617787 1.36205420270562 +1.55280862003565 2.27700191363692 +0.200363930314779 -1.44019917398691 +1.24844966828823 1.28416742011905 +-2.75884722918272 1.40484087914228 +-0.333295788615942 -1.35368778929114 +-1.22975806519389 1.97530345246196 +-1.58023802191019 -0.413470722734928 +0.0334147177636623 -0.513916682451963 +0.723606489598751 1.02234188094735 +-2.71192400157452 2.60093681141734 +1.98991824313998 2.24736044555902 +0.202114675194025 -1.30011815205216 +0.574047591537237 -1.50872865691781 +0.155146811157465 -0.387892227619886 +1.15880945324898 -1.34263291954994 +-2.33879107609391 2.50527653843164 +1.56092922016978 0.167431049048901 +0.522793278098106 1.45531935244799 +-1.54275423288345 1.15091450512409 +-0.206551656126976 1.17583490908146 +-1.64678621292114 0.81617621704936 +1.39004661142826 -1.36211525648832 +-2.31573989987373 2.75242104381323 +-0.878732800483704 0.0429337695240974 +1.24058966711164 -0.147294286638498 +-1.23168898373842 -0.342192586511374 +0.997788283973932 -0.652579020708799 +0.884458281099796 -1.02352900803089 +1.45759299397469 0.601071204990149 +-0.496292110532522 1.05267734453082 +0.90608487278223 -1.27165342494845 +1.5320561863482 2.79862427338958 +1.0955261066556 0.816902466118336 +0.775460373610258 1.63069909438491 +-0.382957138121128 0.93674449250102 +-1.69352323561907 0.294119261205196 +0.0259130746126175 -0.343311481177807 +0.0930146016180515 0.185167338699102 +1.32440802082419 -0.415522642433643 +0.812604624778032 -0.312057659029961 +-0.0775798782706261 1.50756289064884 +-1.00173954293132 -0.215883601456881 +2.67004242166877 1.4218055345118 +1.30557139217854 0.40382607281208 +-1.09812697768211 0.232875410467386 +-1.2340138964355 -0.684770572930574 +0.198747079819441 0.870595429092646 +-0.0235326960682869 -1.08960454910994 +-1.67062331736088 2.7199916318059 +2.41180004179478 1.96345344558358 +-1.88445619493723 0.261486139148474 +-0.221267484128475 0.652395274490118 +-1.30120273306966 0.328293528407812 +1.78156499192119 1.34660445526242 +-2.32475391030312 1.95731552690268 +-0.684544961899519 -0.527581188827753 +-1.25115188583732 2.44621851295233 +-1.99148962274194 1.38984725996852 +-0.485062200576067 0.906633965671062 +-0.40848821029067 1.16564637422562 +-1.22740808129311 1.37795377522707 +-1.96416359767318 2.90018537640572 +2.09066881239414 1.17756043002009 +-2.37176163494587 2.83598211407661 +-1.32914339005947 1.654003161937 +1.57798295840621 0.151651278138161 +-1.02515124902129 1.63954532891512 +-0.998364966362715 0.264427300542593 +0.128578003495932 0.178472761064768 +-2.47440527006984 2.01879696547985 +-0.412627834826708 1.78635150194168 +-1.55071283131838 0.0105222910642624 +1.80423687398434 1.65973404794931 +-1.77880709990859 -0.528857339173555 +-0.325239479541779 1.27368592843413 +-0.356961462646723 1.657122656703 +0.596778780221939 -0.239285033196211 +0.994327709078789 -0.490970946848392 +2.11159466207027 2.66314993798733 +-2.0514238961041 1.25417925789952 +0.321315355598927 -0.822392407804728 +-1.26050077751279 1.34788377210498 +2.83669743686914 1.59180461242795 +1.16895213350654 0.14076566323638 +-2.31611763313413 2.21438378095627 +-1.18570892885327 1.07820708677173 +-2.81989406421781 1.97559417784214 
+-2.35266311094165 1.80736849829555 +-0.0144418105483055 -1.66389140486717 +1.77999041974545 -0.800824642181396 +2.19892741367221 2.63303076848388 +2.54977072775364 2.61417213827372 +-0.399920649826527 -1.80029607564211 +1.23175796493888 -0.498138599097729 +-2.49276819452643 2.33056356012821 +-2.75430783629417 1.75094221532345 +-0.993552930653095 -0.716093100607395 +1.11204611137509 1.92595108225942 +1.9606952778995 1.21276931092143 +2.50044172257185 1.69504882767797 +-1.48710029944777 1.59553980082273 +-1.87748043984175 -0.36412887275219 +-1.96315219625831 2.17867825552821 +0.0192176178097725 -1.25796961784363 +0.316905941814184 -1.09806570038199 +-1.64277904480696 0.194241311401129 +0.0660528838634491 0.244520992040634 +-0.16504018381238 -0.40176809951663 +-0.775603737682104 0.0182349272072315 +1.18007648363709 1.14607951045036 +-1.49661236256361 0.644469138234854 +-0.166560478508472 -1.38236621394753 +-0.492110334336758 -1.09931644052267 +-0.659330703318119 -0.0277940034866333 +1.17636237666011 -0.855551443994045 +0.861180480569601 0.24104718118906 +2.52849067002535 1.61936584115028 +1.07524510100484 0.08399398624897 +1.37647414579988 1.43195811286569 +-1.04186408221722 -1.40898194164038 +0.792264949530363 1.78557724505663 +-1.41825994104147 1.80304077267647 +0.348236609250307 1.24237727373838 +-2.25615164637566 2.18553548306227 +1.7466052621603 2.21175952628255 +-2.14185 2.59911 diff --git a/inst/dev/datasets/cec/mouse_1_spherical/iteration.txt b/inst/dev/datasets/cec/mouse_1_spherical/iteration.txt new file mode 100644 index 00000000..7813681f --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/iteration.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_1_spherical/type.txt b/inst/dev/datasets/cec/mouse_1_spherical/type.txt new file mode 100644 index 00000000..4e76fb85 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_1_spherical/type.txt @@ -0,0 +1 @@ +spherical \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_2_spherical/aproximation.txt b/inst/dev/datasets/cec/mouse_2_spherical/aproximation.txt new file mode 100644 index 00000000..7813681f --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/aproximation.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_2_spherical/cluster.txt b/inst/dev/datasets/cec/mouse_2_spherical/cluster.txt new file mode 100644 index 00000000..7feff1f3 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/cluster.txt @@ -0,0 +1,3000 @@ +2 +2 +3 +2 +2 +3 +1 +3 +2 +2 +2 +3 +2 +1 +2 +1 +3 +2 +2 +1 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +3 +2 +2 +2 +3 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +1 +1 +3 +2 +2 +3 +2 +2 +1 +2 +2 +3 +1 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +1 +2 +3 +1 +2 +1 +2 +2 +2 +2 +3 +1 +1 +2 +1 +2 +2 +2 +3 +2 +3 +2 +1 +1 +2 +1 +3 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +2 +2 +3 +3 +3 +2 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +1 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +3 +1 +2 +2 +3 +2 +1 +3 +3 +3 +3 +3 +2 +2 +3 +1 +3 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +1 +3 +2 +2 +3 +3 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +3 +2 +1 +2 +2 +3 +2 +1 +3 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +3 
+2 +1 +2 +1 +2 +2 +2 +2 +3 +1 +2 +1 +2 +3 +2 +3 +2 +1 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +1 +2 +1 +2 +3 +1 +2 +2 +2 +3 +3 +2 +2 +2 +1 +2 +2 +3 +3 +2 +3 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +3 +3 +2 +3 +2 +3 +1 +2 +2 +2 +1 +1 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +1 +1 +2 +2 +2 +2 +2 +1 +2 +2 +3 +1 +1 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +1 +2 +1 +2 +3 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +3 +1 +2 +2 +2 +1 +2 +1 +2 +2 +1 +2 +2 +3 +2 +1 +2 +2 +1 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +1 +2 +1 +2 +1 +2 +2 +3 +2 +3 +3 +3 +2 +2 +3 +2 +2 +2 +2 +1 +2 +3 +1 +2 +2 +1 +2 +1 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +3 +3 +2 +1 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +1 +2 +1 +1 +2 +2 +3 +2 +1 +2 +3 +2 +1 +2 +2 +2 +2 +1 +2 +3 +2 +2 +1 +2 +2 +2 +1 +3 +1 +1 +2 +1 +2 +2 +1 +2 +2 +3 +2 +2 +2 +3 +2 +3 +2 +2 +1 +3 +3 +1 +2 +2 +3 +1 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +1 +3 +1 +2 +2 +1 +2 +2 +2 +1 +3 +3 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +1 +3 +2 +2 +1 +3 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +3 +3 +3 +1 +2 +2 +1 +1 +2 +2 +2 +3 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +3 +2 +2 +1 +1 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +3 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +3 +1 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +3 +3 +2 +2 +2 +2 +2 +3 +3 +3 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +1 +2 +1 +2 +1 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +3 +1 +2 +3 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +1 +2 +3 +1 +3 +1 +2 +3 +2 +3 +2 +1 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +3 +1 +1 +2 +2 +2 +2 +1 +3 +3 +2 +3 +2 +1 +2 +1 +2 +2 +1 +2 +2 +3 +3 +2 +2 +3 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +3 +1 +3 +2 +2 +1 +3 +2 +2 +2 +1 +3 +2 +2 +1 +3 +2 +2 +1 +1 +2 +3 +1 +1 +1 +3 +2 +2 +3 +1 +1 +2 +1 +1 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +1 +3 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +1 +1 +1 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +1 +2 +1 +2 +2 +3 +3 +2 +3 +3 +2 +2 +3 +2 +1 +2 +2 +2 +2 +1 +3 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +3 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +1 +1 +2 +2 +2 +2 +2 +1 +2 +3 +1 +2 +3 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +3 +3 +1 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +3 +3 +2 +3 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +3 +3 +2 +2 +1 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +3 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +1 +1 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +1 +1 +1 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 
+2 +3 +3 +2 +1 +2 +1 +2 +3 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +3 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +3 +1 +2 +2 +2 +3 +3 +1 +2 +2 +2 +2 +2 +2 +2 +3 +1 +3 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +1 +2 +2 +3 +2 +1 +3 +1 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +3 +1 +2 +1 +2 +1 +1 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +3 +2 +3 +1 +2 +2 +2 +2 +3 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +1 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +1 +2 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +1 +3 +2 +2 +1 +2 +2 +3 +2 +3 +2 +2 +1 +1 +1 +1 +2 +2 +1 +3 +2 +2 +3 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +1 +1 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +3 +1 +2 +2 +3 +2 +2 +2 +3 +2 +1 +3 +2 +3 +2 +1 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +2 +3 +3 +3 +1 +3 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +1 +3 +3 +2 +2 +2 +3 +1 +2 +2 +2 +3 +2 +2 +2 +2 +1 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +2 +3 +3 +3 +2 +3 +2 +3 +1 +1 +2 +3 +3 +2 +2 +3 +1 +2 +2 +2 +2 +3 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +3 +2 +2 +2 +2 +2 +1 +2 +2 +2 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +3 +2 +3 +2 +2 +1 +2 +1 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +3 +2 +2 +2 +3 +3 +3 +1 +2 +2 +2 +2 +3 +2 +2 +3 +1 +3 +1 +2 +3 +1 +3 +3 +2 +3 +2 +1 +2 +2 +1 +2 +1 +3 +1 +3 +2 +1 +3 +2 +2 +2 +2 +1 +1 +1 +3 +3 +2 +2 +2 +1 +1 +3 +2 +2 +2 +3 +3 +1 +2 +2 +2 +2 +3 +1 +2 +3 +2 +2 +2 +3 +3 +3 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +3 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +1 +2 +1 +3 +3 +2 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +1 +2 +2 +3 +2 +1 +2 +1 +3 +1 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +3 +1 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +1 +2 +2 +1 +2 +3 +3 +2 +3 +3 +2 +3 +2 +2 +1 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +1 +2 +1 +2 +2 +2 +1 +2 +2 +3 +3 +2 +2 +2 +2 +2 +3 +2 +2 +3 +3 +2 +2 +3 +2 +3 +3 +2 +2 +3 +3 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +1 +2 +1 +1 +2 +1 +2 +2 +2 +2 +3 +2 +3 +2 +1 +3 +2 +3 +2 +2 +2 +2 +1 +2 +3 +1 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +1 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +1 +2 +1 +2 +1 +2 +1 +2 +2 +2 +3 +1 +2 +1 +3 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +3 +2 +2 +1 +2 +2 +1 +1 +2 +1 +2 +2 +2 +2 +2 +3 +1 +2 +3 +2 +1 +2 +2 +2 +2 +2 +3 +3 +2 +2 +1 +3 +2 +2 +1 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +3 +1 +3 +2 +3 +2 +2 +2 +3 +1 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +3 +1 +3 +2 +2 +2 +1 +2 +3 +2 +3 +2 +2 +2 +1 +3 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +1 +2 +3 +2 +2 +2 +2 +3 +2 +3 +1 +2 +2 +2 +3 +1 +3 +3 +2 +2 +2 +1 +2 +3 +2 +1 +2 +2 +3 +2 +1 +2 +2 +3 +2 +2 +3 +1 +3 +2 +3 +2 +2 +2 +2 +2 +2 +2 +3 +2 +1 +1 +2 +2 +2 +1 +2 +3 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +3 +1 +3 +3 +2 +2 +2 +1 +2 +2 +1 +2 +3 +2 +2 +1 +1 +1 +1 +2 +3 +1 +2 +1 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +2 +1 +1 +2 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +1 +2 +1 +1 +3 
+2 +1 +2 +2 +2 +3 +2 +2 +3 +2 +2 +2 +1 +3 +1 +3 +2 +2 +2 +2 +1 +2 +2 +3 +2 +1 +3 +1 +1 +3 +2 +2 +2 +2 +2 +3 +1 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +2 +2 +2 +2 +1 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +1 +2 +2 +1 +1 +1 +3 +3 +2 +2 +3 +3 +2 +1 +3 +1 +2 +3 +2 +1 +2 +2 +2 +2 +2 +1 +1 +2 +1 +2 +2 +1 +2 +2 +1 +2 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +2 +2 +1 +2 +2 +2 +3 +2 +2 +2 +2 +2 +2 +3 +3 +2 +1 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +1 +3 +2 +3 +2 +2 +2 +3 +2 +2 +1 +2 +2 +1 +2 +2 +2 +2 +2 +2 +3 +3 +3 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +3 +3 +2 +3 +3 +3 +2 +2 +1 +2 +2 +2 +2 +1 +2 +3 +2 +2 +2 +2 +2 +2 +2 +2 +2 +2 +1 +2 +3 +1 +2 +2 +3 +2 +1 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +3 +3 +2 +2 +2 +2 +2 +2 +1 +2 +2 +2 +2 +1 +1 +3 +2 +2 +3 +2 +2 +3 +2 +2 +2 +1 +2 +2 +1 +3 +2 +2 +2 +2 +2 +1 +1 +2 +1 +3 +1 +2 +2 +2 +1 +2 +2 +2 +1 +2 +2 +3 +2 +2 +2 +2 +2 +2 diff --git a/inst/dev/datasets/cec/mouse_2_spherical/dimension.txt b/inst/dev/datasets/cec/mouse_2_spherical/dimension.txt new file mode 100644 index 00000000..d8263ee9 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/dimension.txt @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_2_spherical/energy.txt b/inst/dev/datasets/cec/mouse_2_spherical/energy.txt new file mode 100644 index 00000000..04471717 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/energy.txt @@ -0,0 +1 @@ +3.188627 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_2_spherical/input.txt b/inst/dev/datasets/cec/mouse_2_spherical/input.txt new file mode 100644 index 00000000..5162b903 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/input.txt @@ -0,0 +1,3000 @@ +-0.649097640067339 0.408296354115009 +-0.807575933635235 0.52262195572257 +-1.41406632587314 2.29283641651273 +-0.240965705364943 -1.35587005317211 +-0.809006534516811 1.14556189998984 +-1.80652942508459 1.81081376969814 +1.74072166532278 1.73592169582844 +-1.60198412463069 2.38456276804209 +0.153803925961256 -1.19985854625702 +0.901008110493422 -0.00610697641968727 +1.74970718100667 -0.42045484483242 +-1.22204675897956 1.71583975479007 +0.201163019984961 -1.3493556715548 +2.05124697834253 1.76567437872291 +1.08383569866419 0.0273916497826576 +1.85891108587384 2.57802725583315 +-1.72260251268744 1.595434371382 +-0.170769318938255 -1.02323380485177 +0.575668703764677 1.09080771356821 +2.5843552313745 1.32546020299196 +2.24712392315269 2.26314137130976 +0.524479646235704 1.38298406451941 +0.21388453617692 -0.846555911004543 +0.782779227942228 1.1929050013423 +1.59244503825903 -0.139530632644892 +-0.808999843895435 1.90318553894758 +0.296390544623137 -0.126624930649996 +0.6087631508708 1.50254296883941 +0.383080169558525 -1.91779600828886 +1.02740608528256 0.294794194400311 +1.05604484677315 -0.310404811054468 +-0.637498632073402 -0.97230389714241 +2.70216261222959 1.89261027798057 +-1.74487449228764 0.902358707040548 +-1.83358019962907 -0.661321215331554 +1.80764297768474 0.718597035855055 +0.254763387143612 -0.13969711586833 +-1.79165107756853 -0.589060664176941 +-1.45225536823273 0.751764100044966 +-0.164662756025791 -1.88367027416825 +-0.810914486646652 1.93478035926819 +-1.34299085661769 2.33965306729078 +-1.02718298137188 0.289366170763969 +0.157090734690428 -1.48688578233123 +-1.32469834759831 -0.418268729001284 +-0.120088029652834 1.76124199107289 +-2.29028855636716 1.29390174895525 +-1.49726908281446 1.40729768946767 +-0.82045567035675 -1.51267048716545 +0.352213069796562 1.31428953632712 +-0.0485709607601166 -0.9891691878438 
+-2.39139356464148 1.55668933317065 +0.675613190978765 0.358655091375113 +-0.276534218341112 -1.70343945547938 +-2.17169430106878 1.73204506933689 +-2.35859847068787 1.01706775277853 +0.211742348968983 1.42261929064989 +-0.87984311953187 0.309021204710007 +1.26032826676965 -1.32619705423713 +-1.62375094741583 -0.860062874853611 +0.845773503184319 -1.06038359180093 +-0.299055237323046 1.89645006135106 +-1.16978869959712 -0.0453206524252892 +0.00937087461352348 1.70037674158812 +0.316584065556526 -1.83410523086786 +-2.36573754623532 2.31867336109281 +1.51097824424505 -0.391053941100836 +-2.21880022808909 2.24191799759865 +2.13661048933864 1.15900098159909 +2.28666597977281 1.43850096315145 +-1.41501178964972 1.73488023504615 +-1.5824085213244 -0.117858484387398 +-0.175819251686335 0.14159919694066 +-1.27550159394741 2.31767152249813 +-1.32359965890646 -0.00245634838938713 +0.128691803663969 1.41652270779014 +1.48762753233314 2.44081779196858 +0.467659562826157 1.8344347178936 +-1.9096390157938 0.308322887867689 +-2.51132389903069 1.92527939379215 +1.04675855860114 1.69413363188505 +-0.305342454463243 0.466831374913454 +0.511901661753654 -0.576594542711973 +-0.115777213126421 -0.212807189673185 +1.7189520932734 -0.717853203415871 +-1.07156491652131 -0.969216786324978 +-1.68339177966118 1.79581397399306 +-0.931234091520309 -0.424996912479401 +-0.402811247855425 0.544060494750738 +-0.351966172456741 -0.111439235508442 +-2.55422627925873 2.07314501702785 +1.43800851330161 -0.673132009804249 +0.503016088157892 -1.65846740081906 +1.8771394379437 2.2156242839992 +0.0890492461621761 1.39058675244451 +-1.89451185613871 2.62819213792682 +1.51394177600741 2.49626232683659 +0.220543250441551 0.269982133060694 +2.03368187695742 1.1186650171876 +-0.504734892398119 1.29446469992399 +-1.65497443079948 0.331856176257133 +-0.790675215423107 1.5271401964128 +0.0276868119835854 1.93710817024112 +-2.23567853868008 1.36877425387502 +1.8592628352344 1.91974463313818 +1.60720717534423 1.85647208616138 +1.65314513444901 -1.01490608230233 +2.26698673889041 0.994411997497082 +-1.26782600954175 1.12731617316604 +-0.900171473622322 -0.219711471349001 +-0.626016240566969 1.34829626604915 +-1.92707321420312 0.973092701286077 +-0.283227507025003 1.40018428489566 +-0.950807083398104 2.02043945342302 +-0.550827290862799 0.401831816881895 +1.62617671489716 1.57836598530412 +1.95642914250493 2.06269023939967 +-0.687410209327936 -1.49394136667252 +1.06357636675239 1.84001991525292 +-2.0955977216363 1.7160473652184 +-0.0589525252580643 0.733373906463385 +1.72341998666525 2.63361315056682 +1.10096355527639 -1.21557977423072 +-2.03320637717843 2.69310788065195 +-0.552457291632891 -0.0893738493323326 +-0.834173858165741 -1.5873536914587 +-0.165886547416449 -1.90308615192771 +0.206223525106907 1.76236512511969 +0.607093192636967 0.977219618856907 +-0.986789967864752 0.925752770155668 +1.94170973449945 1.38060898333788 +1.52236812189221 2.36051461100578 +0.807106249034405 1.57448329776525 +0.402864366769791 -1.11093397811055 +-1.30890310183167 -1.15134290978312 +1.10590060800314 0.563718106597662 +-0.457007244229317 -0.914427168667316 +0.60820809751749 -0.418481308966875 +-1.13846439123154 1.00987203791738 +1.06302529573441 -1.29263773933053 +0.145529467612505 1.80158681422472 +-0.159402526915073 1.33445791900158 +-1.74739282205701 -0.632589466869831 +0.234247252345085 -1.5872844606638 +-1.02311493083835 -1.18169776722789 +-0.844088796526194 1.82367134839296 +0.932040877640247 1.17455987632275 +0.611630886793137 -1.74438877031207 
+0.974653799086809 0.922471933066845 +1.16440780460835 -0.35607922822237 +-1.1203256547451 0.74524999037385 +0.826645884662867 0.0575481131672859 +-1.02164890617132 0.820258669555187 +1.56061197817326 -0.049822524189949 +1.54217150434852 -0.220934148877859 +-0.178020756691694 0.640313558280468 +0.578101921826601 -1.73060950264335 +1.18077543750405 2.56696271523833 +0.819916412234306 1.97414798662066 +-0.206230536103249 -1.93741822242737 +1.3480294495821 -1.07584491372108 +-1.91013825312257 2.64748274162412 +-1.79123204946518 0.0233245380222797 +-1.72292440757155 0.902466144412756 +-1.69480007141829 1.99826105311513 +-0.447371613234282 1.14098888263106 +-0.697093136608601 1.48328424990177 +1.78722858056426 -0.0887695290148258 +-0.697849456220865 -1.59372465312481 +-2.73872856795788 1.78895360976458 +-1.17436996102333 1.12454633414745 +-0.833971738815308 -0.449261449277401 +0.805052295327187 1.16184240207076 +1.2493119686842 -0.66439813375473 +-0.751456402242184 0.0465897023677826 +0.533901270478964 1.34532299637794 +1.39566580951214 1.08905233070254 +-1.24247882142663 -0.304575245827436 +1.05523022636771 1.42991114407778 +-1.93224615976214 -0.177230473607779 +0.989966630935669 -0.915879305452108 +-1.47454807907343 0.285879857838154 +0.234050489962101 1.62827354669571 +-1.79186031594872 2.24100651592016 +-1.71167525276542 2.58818038925529 +-0.935824904590845 2.16395191475749 +-0.439499601721764 -1.40899714827538 +2.51507068052888 2.10940356552601 +-0.249615002423525 0.612322341650724 +0.0917461179196835 1.10659458115697 +-0.656467668712139 1.41009653359652 +0.107298050075769 -1.2438183426857 +-2.38969913497567 1.87293467670679 +-1.10248775035143 0.0937480628490448 +-1.28762873262167 -0.581416588276625 +-0.332925286144018 1.23400517180562 +-0.905617691576481 -1.41142004728317 +-1.8205388635397 0.0720383413136005 +-0.416185487061739 -0.116582173854113 +0.867729399353266 -0.525488406419754 +0.452201452106237 1.15462157875299 +0.683328852057457 0.417478233575821 +0.460333365947008 0.217518914490938 +-1.29856794327497 -0.504395689815283 +0.203640807420015 -0.966524284332991 +1.78165986016393 0.97632172703743 +0.461797770112753 0.47015680000186 +0.678831156343222 -0.985908523201942 +-2.44774684309959 2.30767585709691 +0.656247045844793 -0.311436947435141 +-0.742664821445942 0.685851112008095 +1.559555567801 1.37560191377997 +-1.8944887034595 -0.406724855303764 +1.67478524520993 1.67824802547693 +-0.167668052017689 0.275063134729862 +1.48011878877878 -1.13171013444662 +-0.549465779215097 -1.02472613379359 +-0.496278535574675 1.50698729977012 +0.00424785912036896 1.05739696323872 +1.32454036921263 2.53060096129775 +-0.351922743022442 0.976038951426744 +0.633204605430365 1.24695154651999 +-1.109858058393 1.12414477020502 +2.0539874099195 0.84529972076416 +-2.09678584709764 1.51815884187818 +1.78548592701554 0.68286195024848 +-0.299079090356827 -0.370336677879095 +-0.364061612635851 0.268065985292196 +1.34419587999582 0.948234345763922 +-1.08022587373853 0.338301848620176 +-1.33811064064503 1.50825317949057 +0.278141163289547 1.62119785323739 +-0.187076762318611 -0.83244987949729 +-0.164762575179338 -1.25855301693082 +1.72502038255334 -0.0607107989490032 +-1.59953481331468 0.199259925633669 +0.931840170174837 -0.76713440194726 +-0.21122071146965 0.441190075129271 +-1.5569927059114 1.76591106876731 +2.54668017476797 2.01522625237703 +-0.827849600464106 -1.61512494087219 +-2.6429272852838 1.95840268209577 +0.803739633411169 -1.60825137421489 +-0.714344631880522 0.57532100006938 +2.04923632740974 
1.93540675938129 +1.12112950161099 -0.855257648974657 +-1.6833373978734 0.401003874838352 +0.444536741822958 0.513516515493393 +-0.325854849070311 0.225233498960733 +-1.63691977411509 0.769343361258507 +-1.06190749630332 -1.47382890805602 +1.25153439491987 0.439794078469276 +-2.71023654192686 1.44735569879413 +-0.378867454826832 -0.658153384923935 +1.92596044018865 0.0139703974127769 +1.5230704434216 -0.1167813539505 +1.69972512125969 -0.578212775290012 +-1.10212778672576 2.35502851381898 +-1.14577504247427 1.22035431489348 +-0.803814891725779 -0.695003140717745 +-1.53372422605753 -0.223905734717846 +1.34636790677905 2.19239231944084 +1.39678496494889 -0.140033803880215 +-0.906028233468533 1.02058485150337 +-0.905020594596863 1.60176100954413 +0.69533709436655 -1.75085057318211 +0.562092565000057 1.385039485991 +-0.935775704681873 1.56089590489864 +2.67298435419798 1.66798298805952 +1.70550078898668 -0.177045695483685 +-1.53244153782725 1.75126794353127 +2.39110692963004 2.49939171224833 +0.414882753044367 -1.77925358340144 +-0.448651764541864 -0.111862167716026 +-1.72269093245268 2.46203555911779 +-1.12074176967144 0.790610011667013 +1.45686508715153 2.14977977797389 +-1.40698473900557 2.34604375809431 +-1.66650464013219 1.66787406057119 +-2.32455184683204 1.96515868976712 +-1.2932029850781 1.88042332604527 +-1.65378860011697 1.03349739685655 +0.561880085617304 -0.0831819288432598 +0.44348955526948 1.7116673886776 +-1.51317059621215 2.45782340317965 +1.51566876098514 1.30907212942839 +-1.53910152614117 1.86735344305634 +0.0164519287645817 1.18712798506021 +0.457188356667757 -1.9288779348135 +1.60945073142648 0.0128599628806114 +-0.406986735761166 1.24872826784849 +-1.75171378999949 1.60045084357262 +0.522046159952879 0.454779043793678 +-1.51002214476466 -0.433382030576468 +-1.58840509876609 -0.128056056797504 +1.90912174060941 1.24426994472742 +-0.308238852769136 0.500964496284723 +0.253129839897156 -1.91529872268438 +1.44686910137534 2.06149318441749 +-1.71933726593852 2.01276142150164 +0.134064249694347 0.838784523308277 +0.580334234982729 1.63753580674529 +-1.92858282104135 2.37410202994943 +-1.91586883738637 2.65254385396838 +0.855840619653463 0.85561840981245 +1.66241204366088 1.63577385619283 +-0.456809148192406 0.588008180260658 +-1.72457643598318 2.03136116266251 +-0.887050725519657 1.09892199933529 +-0.0716216079890728 0.850509487092495 +0.304502084851265 -0.626962549984455 +0.0687862411141396 1.20972913876176 +1.61829642951488 0.648711558431387 +0.0384910888969898 0.637048047035933 +-0.442379929125309 0.149296771734953 +-0.123289000242949 1.21787907555699 +1.21790616214275 2.26508320495486 +0.372837968170643 0.40275139734149 +-1.26345077902079 0.399464283138514 +-0.210570450872183 -0.846550092101097 +-1.40443550050259 -0.945557031780481 +-1.12280105799437 1.73236264660954 +0.499381627887487 1.6591210514307 +1.93672286346555 1.69963704049587 +-0.555817767977715 0.167575836181641 +0.321644153445959 1.25036808848381 +-1.27752850204706 2.58774822577834 +-1.84327478334308 -0.25649144500494 +2.05685747042298 1.47432240098715 +-2.6886912509799 1.44711922109127 +-0.684131778776646 1.47254236415029 +0.846364874392748 1.36625817418098 +0.172955509275198 1.97253074496984 +2.31451424211264 2.01733453944325 +2.12910757958889 2.23743064329028 +-1.38825725018978 -1.31328041106462 +-0.514949969947338 -1.4563838429749 +1.25876485183835 0.695539738982916 +-1.74532884731889 -0.529113229364157 +0.0632135383784771 -0.937855530530214 +1.61985495314002 0.335637699812651 +-1.50588108599186 
2.16938721761107 +-1.16501285135746 -0.630299061536789 +1.11890789121389 1.88414286822081 +1.56510501727462 0.264411639422178 +2.22334563732147 1.97165543213487 +-0.257768474519253 0.0743283033370972 +-0.00386032462120056 -1.34355297684669 +-0.539586879312992 1.92520007863641 +0.470838729292154 -0.122166138142347 +-0.915041159838438 1.68745474144816 +1.36663690581918 1.2120655477047 +-1.89171198382974 0.156909734010696 +2.53897904232144 1.65131259709597 +1.41564970090985 0.782911960035563 +-1.71902780979872 2.53667014092207 +1.60216020420194 0.241761539131403 +-1.59376062080264 1.97202388942242 +1.06155529245734 -1.00237921625376 +0.890628285706043 2.14203393831849 +1.87093214318156 0.974581312388182 +1.79465992003679 -0.59504684060812 +1.9656959399581 2.73552702367306 +-0.21941751986742 -0.0763701647520065 +-1.81483649462461 -0.761188808828592 +0.130891311913729 -0.574549216777086 +0.996602244675159 -1.227055080235 +-0.271618872880936 -0.704240266233683 +1.69178124889731 -0.0362846627831459 +-1.33337722346187 -0.392266537994146 +1.24050430580974 -0.623898513615131 +-0.223169829696417 0.377220332622528 +-0.977340869605541 0.234108865261078 +-1.56698390096426 -0.207747425884008 +-1.41844560205936 0.351533249020576 +-0.836925115436316 1.92356771603227 +-1.36012962833047 0.7361014559865 +0.450691517442465 -0.62854116037488 +-0.0564126819372177 -0.944917216897011 +1.60607011988759 -0.638820480555296 +0.396077539771795 -1.39448064565659 +0.926537677645683 0.694845594465733 +2.19373325258493 2.31176307424903 +2.10219293087721 2.57549534365535 +1.27875134721398 -1.34554563462734 +1.08924024924636 1.87644591554999 +-1.99031689018011 -0.154680736362934 +-1.27192595601082 2.51909646391869 +1.49451681599021 2.51418820768595 +-0.64579126611352 -1.11740208417177 +-0.832049537450075 0.954464290291071 +-0.973603509366512 0.684025395661592 +-1.74200744181871 2.24982177466154 +-1.55338723957539 1.5635126195848 +0.498314447700977 0.925668086856604 +-1.03532092645764 -1.26874169707298 +0.0863930359482765 -0.189795970916748 +2.53415023908019 1.75097864121199 +1.95521479099989 -0.153398126363754 +1.81359706819057 -0.842946372926235 +-2.19453426450491 1.11385210230947 +-1.49278014153242 1.2342741265893 +0.365170482546091 0.566581688821316 +-1.73910537734628 1.27218621969223 +-0.0716397538781166 0.762507896870375 +-1.13677383214235 -1.40080473572016 +1.41731663420796 1.45562671497464 +-0.813192099332809 1.40217902883887 +1.58761820197105 1.62448981031775 +-0.0383275263011456 0.188447199761868 +0.156074311584234 -0.972646787762642 +1.04042853787541 0.18900977820158 +-0.88936959207058 -1.31156947091222 +0.310843884944916 0.0197458788752556 +1.06750833615661 -1.24275540933013 +-1.33583645150065 -1.04819202050567 +-0.79921168088913 1.96500128507614 +-1.14577700570226 1.72604294493794 +0.55578202009201 0.149394474923611 +1.6775561273098 0.617094475775957 +-1.01103469356894 -1.31929495185614 +-1.38710689544678 0.371917717158794 +1.58953529596329 1.15711507573724 +-0.609536353498697 -1.78238146752119 +-0.660511009395123 0.221203226596117 +-0.98776064068079 0.321369010955095 +-0.562292378395796 1.66471489518881 +-1.77953666448593 2.33731785044074 +-2.05996615067124 2.10170586407185 +-0.67828768491745 -1.6449521034956 +-1.42432636395097 2.09065570309758 +1.18407428264618 -1.07858979329467 +-1.56586059555411 2.38309692591429 +1.76079827174544 1.7656717300415 +1.00458079576492 -1.57539673894644 +0.854396693408489 0.873832266777754 +0.185944989323616 -1.42763789743185 +2.63178929314017 1.48056826367974 +1.41512761637568 
2.69997546076775 +-0.0744541361927986 -0.629871290177107 +-2.38244127854705 2.23857615888119 +0.86452529579401 0.70793978869915 +1.6617938131094 -0.366358235478401 +1.12168938294053 0.554337944835424 +-0.335315600037575 -0.545378278940916 +-0.771512437611818 -0.492312639951706 +-0.812581408768892 -0.437310520559549 +1.1439051553607 1.56093442812562 +0.356298889964819 -1.24660977348685 +-1.96105089411139 1.70243603736162 +-0.306045740842819 -1.77931379526854 +1.17218549922109 0.484342470765114 +2.06145634502172 1.46781897172332 +1.21990863978863 2.58258854597807 +1.41946555674076 0.626814037561417 +-0.956472232937813 1.45675296708941 +0.0898967944085598 0.403933893889189 +-0.599855229258537 -0.746526971459389 +-0.297540672123432 1.74059614911675 +1.75150639936328 0.864683594554663 +-1.56660597026348 -0.987871166318655 +-1.0763506218791 -0.922245759516954 +-1.47208060324192 2.53831192478538 +1.40448072552681 1.74104551225901 +2.25324134156108 1.7964154407382 +-1.33119038119912 -0.75755737349391 +-0.470253024250269 -1.34057439118624 +1.55878097563982 -1.22126707434654 +-0.658056698739529 0.837491177022457 +2.07954757660627 1.61483786255121 +-1.6142706759274 -0.0775446482002735 +-1.72180998697877 0.894506551325321 +-1.77484543994069 2.25939275324345 +0.631723877042532 0.193278230726719 +0.139369953423738 -1.70229246094823 +1.90830347687006 2.12659001722932 +-0.343708775937557 0.169108223170042 +-1.88543978706002 0.0185515768826008 +-0.835481379181147 -0.627411779016256 +0.170110311359167 0.464201781898737 +0.362088982015848 -0.414250120520592 +0.676200110465288 -0.757670100778341 +-1.23140157386661 1.48773692920804 +2.16286908835173 2.44291415438056 +0.0731420516967773 -1.72598065808415 +1.76684502512217 1.16587085649371 +-1.39835537225008 -0.0683176778256893 +-0.976873468607664 -0.358734451234341 +0.349316116422415 -1.03438420593739 +0.275296498090029 1.82352923601866 +-0.0486390851438046 -1.0648463703692 +-0.691233392804861 1.77738162502646 +0.782562240958214 -0.912443399429321 +0.268867425620556 1.17994451895356 +1.05905979126692 -1.63457971811295 +1.28684512525797 1.78340581804514 +-1.68673431873322 1.1419372856617 +-1.86213435977697 0.660303466022015 +-0.922318480908871 -1.23344100266695 +-1.29328970238566 -0.145831905305386 +-1.92296160757542 2.54331916943192 +1.25193921849132 0.197594109922647 +-0.0282733142375946 1.81638247147202 +-0.0894774571061134 0.299551676958799 +-0.17873651906848 -1.13632128760219 +0.384060919284821 1.65990968421102 +0.736429821699858 -0.804072305560112 +0.341185420751572 0.825544342398643 +-1.52481646835804 1.01968801394105 +1.24904447793961 2.492934435606 +-0.246172867715359 -1.68664997443557 +2.44707094877958 1.13160264492035 +-0.587629217654467 -0.154323428869247 +0.8269003033638 -0.983990766108036 +-0.653048608452082 -1.13745626062155 +2.36154668778181 1.95362406224012 +0.747401397675276 1.46474760398269 +0.23150248453021 -1.69140166789293 +-1.30822554603219 0.989471554756165 +0.0250147171318531 1.26661954820156 +0.333794616162777 -0.812536582350731 +-2.26338439062238 1.72183839604259 +-0.095207467675209 0.205210562795401 +1.92536081746221 2.27824918925762 +0.96856377646327 -0.555654179304838 +1.01179733499885 2.14592017233372 +-0.137700248509645 1.1115278378129 +-2.33445109799504 1.19454141333699 +0.385220274329185 0.573728416115046 +0.858502186834812 0.558201227337122 +-2.35866514593363 1.38543798774481 +0.471359882503748 -0.49472314491868 +-1.42862964048982 -0.17214086279273 +0.881183218210936 0.571060858666897 +-0.644852742552757 1.7104775942862 
+1.12737057730556 -0.999339956790209 +-0.50034861639142 -1.29400199279189 +-1.8826592490077 2.37473522871733 +0.58922266587615 0.395736653357744 +1.17411537468433 -0.351684376597404 +2.16339967772365 2.29587806761265 +1.01638673618436 -0.449155565351248 +-2.48747099563479 2.12833416834474 +2.32781500741839 1.04579991847277 +1.76985342055559 0.422601651400328 +-1.20213773474097 -1.38144750148058 +-0.334887456148863 0.800697203725576 +2.32786116376519 1.76354138553143 +-0.345368184149265 0.131021935492754 +1.64940991997719 2.12143660709262 +0.785865660756826 -0.494428090751171 +-0.730388719588518 0.810137208551168 +1.10482731461525 1.45447611063719 +-0.808123476803303 -0.415112499147654 +1.64594238623977 0.115736853331327 +-2.45943002775311 1.88566955551505 +-0.019577719271183 -1.58845267444849 +1.75087888166308 2.71905419975519 +0.483988083899021 1.77430965006351 +0.196773879230022 1.25022590532899 +1.93434988334775 1.63726980611682 +1.61363665014505 1.29101568087935 +-1.95702186971903 1.89654012396932 +-0.408130168914795 1.90406507253647 +0.358841374516487 1.9074446670711 +0.734227612614632 1.0348468311131 +0.926233738660812 1.50798469781876 +-1.22225850075483 0.385461945086718 +-0.916059222072363 -0.653809953480959 +-1.28455968573689 0.140168085694313 +-1.10237107798457 0.758745968341827 +0.96965105459094 0.513448435813189 +-0.783259902149439 1.12821429222822 +1.37483804672956 -0.753170106559992 +0.548634763807058 0.323309846222401 +0.623558141291142 0.950037308037281 +-0.692744251340628 0.226652916520834 +-1.23139430209994 -0.422291941940784 +-2.42365545779467 2.34680910035968 +1.37231296673417 -0.888339523226023 +1.25555715337396 -1.15495608001947 +-1.51518706977367 2.10996996983886 +1.08320129662752 -1.51856808364391 +-0.18073258548975 1.41830313205719 +1.83971151709557 2.024000428617 +0.246762745082378 -1.27389849349856 +2.05680235475302 0.867634724825621 +1.11226580664515 -0.348583023995161 +1.10052146017551 1.58980524912477 +1.85381131991744 0.641317464411259 +-1.2828180976212 -1.08724246919155 +-1.76682638376951 1.22056019678712 +0.018401775509119 1.33275803551078 +-1.1854879334569 1.39286181330681 +-2.5813794657588 2.25599414855242 +-1.02185039967299 2.16341564431787 +-0.919620957225561 -1.243093829602 +0.201122622936964 0.875365946441889 +-1.56594972684979 2.11610895767808 +0.597182877361774 -0.819117851555347 +-0.701289631426334 1.24994299188256 +0.255758602172136 -1.0020669400692 +0.197925999760628 -1.75779293105006 +2.18392858654261 2.53405466675758 +0.690229952335358 -0.657780580222607 +-1.0657757371664 1.72220869734883 +1.94221831485629 1.4538861811161 +1.56474012881517 0.88307947665453 +-0.599572572857141 -1.50246687978506 +2.61841187998652 1.77252304926515 +-0.745710007846355 -0.234991233795881 +1.36770543083549 1.85507631674409 +-0.116917885839939 0.243452284485102 +-1.68340345099568 0.704070884734392 +1.49266545847058 2.71682129055262 +1.29303632304072 -0.56181750819087 +1.06517909094691 -1.29622010886669 +-2.39727663993835 1.27236568182707 +-1.56593092530966 0.703008402138948 +0.410207115113735 1.72599062696099 +1.03478935360909 -0.133511520922184 +0.46577176079154 0.0797071047127247 +-1.5229262933135 -0.00491827353835106 +1.37839599698782 1.38016057386994 +0.973402991890907 1.37195426598191 +-1.53444562479854 2.44188645482063 +-1.71785110607743 1.58903403952718 +1.09160731732845 -1.47899010032415 +2.26828938350081 1.64024895057082 +-0.78251351043582 -0.0327897183597088 +-1.21273778751493 -0.434684287756681 +2.26084705814719 1.42731167003512 +-0.792846899479628 
-1.82142630591989 +-0.680218312889338 1.09061854332685 +1.55124504119158 2.44316620752215 +0.37851994484663 -1.23815621063113 +-0.261066731065512 1.49317986890674 +1.79810737073421 -0.570220142602921 +0.565875977277756 0.00610850378870964 +0.161260053515434 -1.07373028993607 +1.56201288849115 0.779842436313629 +0.0223434790968895 0.190297890454531 +-0.0864974185824394 -1.31957776844501 +1.12282740697265 2.25825402140617 +-0.308795012533665 -0.641968447715044 +-1.85297350212932 -0.000255919992923737 +1.83108159527183 1.8919876255095 +-0.510490346699953 -0.67063196003437 +0.326094184070826 -1.59036732465029 +1.64905489981174 -0.753570161759853 +1.92529591172934 1.2674170807004 +0.782755050808191 -0.88158917799592 +0.425546415150166 0.0219052396714687 +-0.941311057657003 -1.42252350598574 +-2.28164589032531 2.31030131131411 +-1.09805969521403 0.612494077533484 +-0.196877311915159 1.18465990573168 +1.53359206020832 -1.01815540716052 +-0.60945612564683 1.27607395127416 +-0.151974059641361 1.38808980584145 +0.676863513886929 1.09757688641548 +-1.73976784199476 1.76589269191027 +-0.395219672471285 0.230592861771584 +0.815818171948195 -0.42065192386508 +-1.33185571432114 -0.898668121546507 +-0.116638887673616 0.19905024766922 +0.0158485658466816 -0.0880106389522552 +0.0746829248964787 -1.35300204157829 +1.68046057596803 -0.596471667289734 +0.293336275964975 -1.04184950143099 +0.432195611298084 -0.205857984721661 +-2.09260195493698 2.22491950914264 +1.72885094955564 1.58563318476081 +0.176573168486357 -1.86925553157926 +2.10949410870671 2.64717761427164 +2.18767710775137 1.07763171195984 +-1.16770657151937 -0.923097644001245 +1.41023437306285 -1.28155676275492 +-1.21971904113889 1.62579920142889 +-0.578925747424364 0.627533167600632 +2.37784032151103 2.16085527464747 +0.589245330542326 -0.290553119033575 +-1.70608828216791 2.42041115835309 +0.0613664537668228 -0.00860544294118881 +0.909182470291853 1.67989129945636 +-1.87066591158509 0.648443173617125 +0.556589268147945 1.70741699635983 +-0.546788476407528 -1.66313252225518 +0.506078828126192 0.580704886466265 +1.79312986880541 1.30806604027748 +-0.563491094857454 -1.73019643872976 +-1.91688002645969 2.56214925646782 +0.284545548260212 0.656870722770691 +0.642775930464268 0.700732726603746 +1.69304636865854 2.36845820769668 +-0.831103757023811 0.473324924707413 +-0.774071358144283 -1.79767268896103 +1.68082847818732 0.653393477201462 +1.81990025565028 2.68236497417092 +-1.07097993791103 1.7930474281311 +1.94984925538301 0.919484402984381 +2.76123372092843 1.73172958940268 +0.0956330187618732 1.30896363407373 +1.28230846300721 2.56451522931457 +1.12228440120816 -0.206950273364782 +-1.57961160317063 0.437907066196203 +2.28891426324844 1.88641811534762 +1.47517981380224 -1.30179155245423 +0.142558105289936 0.589871820062399 +-1.85516045987606 2.5426720045507 +-0.889446295797825 -0.767448224127293 +0.060591958463192 0.494419515132904 +0.0626354776322842 -0.702193040400743 +-1.84704908356071 1.75517785921693 +-1.24444667622447 -0.724383749067783 +-0.930836249142885 1.64518431946635 +-1.41284645348787 0.104280814528465 +1.70260412991047 -0.15326888486743 +2.4706604629755 2.10320093482733 +-1.88649175688624 2.73445561155677 +-1.5158298574388 1.5834923684597 +2.06200086325407 1.91274541988969 +0.926703192293644 -1.35055538639426 +0.968811720609665 -0.934115614742041 +-2.07020248100162 1.43360640853643 +2.20479379966855 1.21053855866194 +1.65269929543138 0.177330266684294 +-0.382580626755953 -0.510838218033314 +1.27959142252803 1.89596106484532 
+-1.62755922228098 -0.220698688179255 +1.75927284732461 0.415262296795845 +1.41609141230583 0.731934577226639 +-2.26181201264262 1.76264552026987 +1.74077944457531 0.630616426467896 +1.10881999880075 -0.881391327828169 +-1.3217157535255 -0.688524983823299 +0.619230791926384 -0.24318727850914 +-1.22652271017432 1.69738034531474 +0.729095354676247 -1.60827789828181 +-1.16703080013394 -0.847164284437895 +2.28210901468992 1.23282488062978 +-2.32832760363817 1.81329910084605 +1.49602905660868 1.4018176831305 +0.82220346480608 -0.585836011916399 +-0.0737116597592831 1.6180949062109 +2.03095188364387 2.61934965103865 +0.437182947993279 0.622637845575809 +-1.60966883227229 -0.627398733049631 +-0.967199955135584 1.33069409802556 +1.62086469680071 1.12911984696984 +-1.53373060747981 1.85991260781884 +-1.11269273608923 1.81909773871303 +1.51931928098202 -0.622355528175831 +1.31138054281473 0.721410017460585 +1.32357081398368 -0.115576710551977 +-1.78687277063727 1.01808232069016 +-1.723517652601 -0.178363416343927 +-0.670558977872133 -0.0340761356055737 +-1.390510071069 -0.975454896688461 +-1.50509100034833 0.496806718409061 +-1.19014520570636 2.16579028218985 +0.158093150705099 -1.51457623392344 +0.17756624519825 1.70900921151042 +-1.29508331790566 -0.625781439244747 +-0.877573065459728 0.274310354143381 +1.24568874761462 0.58392671495676 +-1.10537868365645 1.5752484165132 +0.538040224462748 -0.0152369812130928 +1.40140888094902 -0.0791587568819523 +-0.19843515008688 -1.07681282237172 +1.63484471291304 0.847745914012194 +-1.6163287088275 0.23577381670475 +-0.285119641572237 -1.86953181773424 +1.41759844124317 1.32312982529402 +-1.48072937130928 0.919389273971319 +2.7164629586041 1.98075502365828 +-2.33210576325655 1.4953227005899 +0.896824080497026 1.27034582197666 +1.69743127003312 -0.85331767052412 +2.02689778059721 1.24907104671001 +-1.81603926420212 0.928874891251326 +1.03151476383209 2.19764101132751 +0.0666704587638378 1.48977917060256 +-0.891888409852982 -0.952536288648844 +0.0611691847443581 -1.38976871594787 +-0.741215765476227 -1.60374213755131 +-0.983322542160749 0.788801163434982 +-1.34928694367409 -0.00486821681261063 +-0.991624187678099 -0.933842059224844 +-0.524510983377695 1.02451016008854 +-1.8878178037703 2.30132402852178 +-1.80282521620393 2.23192574083805 +-2.69548732787371 1.41280471906066 +-1.09971207007766 1.63079483807087 +-1.65951097011566 2.29766901955009 +2.29889656230807 1.36104686930776 +-0.0483877100050449 -0.725728318095207 +-0.368966910988092 -0.337788373231888 +2.25388148799539 1.64733375608921 +2.11559322103858 1.08322706446052 +1.57140509784222 0.312953159213066 +1.81547360867262 0.627422451972961 +0.636260103434324 -1.38700498268008 +-1.56669459491968 2.30149949714541 +0.545926697552204 1.13306342437863 +-2.72313288226724 1.5716561563313 +-1.02234153077006 0.236824553459883 +-0.469311140477657 -0.565301917493343 +1.01707030460238 -1.48222672939301 +-0.911134824156761 1.94073479622602 +0.277921639382839 0.495407368987799 +-0.224498882889748 -0.708679866045713 +0.488169047981501 0.657226905226707 +-0.476981069892645 -0.644570153206587 +-1.25887857005 0.929413001984358 +0.0500610768795013 -1.09681406244636 +0.34292571246624 1.66492416709661 +0.255388088524342 0.618883579969406 +0.178956434130669 1.21992049366236 +2.74666523188353 1.87850533798337 +-1.21695541217923 1.60067336633801 +-1.41202061623335 1.98652771487832 +-0.914179146289825 0.842000294476748 +1.28773294761777 -0.545925911515951 +0.922058928757906 2.16350819170475 +2.59011177718639 1.96633694693446 
+-2.50322905555367 2.43140615895391 +-1.58764788135886 -0.432073809206486 +1.49271377176046 0.115800462663174 +0.231257449835539 -1.3261340893805 +1.80699044466019 -0.792813833802938 +1.8436005525291 0.565507587045431 +-0.99509759247303 -0.385737534612417 +0.612893123179674 1.61557329073548 +1.36229495704174 1.69931225851178 +0.926931407302618 1.58039037138224 +2.46081982553005 1.21403437107801 +-2.01126836240292 2.1135349534452 +0.641887944191694 0.597592566162348 +-0.39684559777379 -0.208799280226231 +-0.761936172842979 -1.01453436166048 +0.064405020326376 1.95447036251426 +-1.83273086324334 2.17447444424033 +0.488131407648325 -1.9255444817245 +2.11567686870694 1.7806500941515 +1.39353952929378 0.0914776399731636 +0.384345557540655 1.16242761537433 +-1.28926843032241 1.00886202603579 +0.572868313640356 0.250744823366404 +0.531876873224974 -0.553940549492836 +0.649913366883993 -1.48486177995801 +-1.16767395660281 0.965199399739504 +-0.272812251001596 0.241462372243404 +-1.84488851204515 0.963095009326935 +1.47669460996985 -0.888386309146881 +-1.79664256051183 1.24869180843234 +2.16403616964817 1.06574932485819 +0.170340280979872 0.913764525204897 +-1.35368482023478 0.539881635457277 +-1.06732605025172 -1.52756758779287 +-0.591396894305944 -1.05309298634529 +1.73709803074598 1.26303900033236 +-1.49613188952208 -1.3190787807107 +-1.31895628198981 0.846436183899641 +-0.130883030593395 -1.84094346314669 +1.00154972821474 1.53235757723451 +0.0576329380273819 0.780269138514996 +-0.639979235827923 -1.2637140378356 +0.790073912590742 -0.271806586533785 +-1.34229910001159 2.29393865168095 +-1.07706803455949 -0.927376493811607 +0.272692807018757 0.917201790958643 +-2.53351761773229 1.32547678798437 +-1.45048862695694 2.65606252849102 +-0.809473473578691 1.69434504583478 +1.0594424456358 -0.15083584189415 +0.225159298628569 1.08821665495634 +-0.491480275988579 0.122546587139368 +-0.947186700999737 -0.384588275104761 +-1.73905735835433 1.20298994705081 +-2.08146171271801 1.07529098540545 +-1.28563325479627 1.44508855417371 +-0.566826410591602 -1.03822178021073 +1.23021031916142 1.47194290906191 +2.53720383346081 2.26379666849971 +1.21171427518129 1.08897963538766 +0.832529783248901 -1.68915913999081 +0.584701422601938 0.433161925524473 +1.6209946423769 -0.865348625928164 +0.0787889435887337 1.48742830753326 +0.214969385415316 -0.285262417048216 +-1.22086392715573 0.105656560510397 +1.48201066255569 -0.645508803427219 +1.18055355548859 -0.947616428136826 +-1.98597376421094 1.25557527691126 +-2.27991250157356 1.99887488037348 +-0.78177222982049 0.147926345467567 +1.60293316096067 -1.07913783565164 +1.41450773924589 0.669970542192459 +1.14717435836792 1.75159204751253 +0.662808567285538 0.638735186308622 +1.49656802415848 2.159075435251 +0.0372371189296246 0.656317383050919 +2.04057704284787 2.32506604492664 +-0.156101826578379 1.8899129666388 +0.936846390366554 1.91507167741656 +0.0489499270915985 1.53936304524541 +0.664488397538662 -1.18443086370826 +-1.36106698215008 1.01604156568646 +-1.74571157991886 1.10271691530943 +1.7576251141727 -0.946905728429556 +0.895610351115465 -0.0157884731888771 +-0.268405687063932 -1.79656484350562 +0.228204809129238 -0.871798127889633 +1.54991286993027 0.281774546951056 +1.93305699154735 1.70436038821936 +1.79203936085105 0.828338816761971 +-0.0938997454941273 1.67745372653008 +0.280732370913029 1.55548966675997 +1.3392932638526 0.744939520955086 +0.713812086731195 -1.55471259728074 +-2.73535241931677 1.82324456050992 +-1.54673070460558 0.992421608418226 
+0.0391140915453434 0.219440426677465 +0.970250111073256 2.10399927198887 +-1.3327379450202 0.762428589165211 +-1.01999524608254 1.44345911219716 +-0.366057608276606 1.70241189748049 +0.37034098431468 -0.329273909330368 +-0.213392369449139 0.625289279967546 +-2.19691159576178 1.2868390083313 +2.57499992102385 2.22774219512939 +-0.289193466305733 0.37096980959177 +-1.82748784869909 2.27353348210454 +-0.580534566193819 0.390047613531351 +1.61170377954841 1.58508635684848 +1.81351255625486 1.4125995747745 +0.807440266013145 0.155545718967915 +-0.26973707228899 -1.87186853587627 +-1.53041150048375 -1.16994150355458 +-1.15831179171801 1.24129266291857 +-0.185020450502634 -0.233535714447498 +-1.78148758411407 0.566196385771036 +-0.44498772546649 0.174107298254967 +2.72741859778762 1.99566528946161 +0.349529825150967 0.561652716249228 +0.744035113602877 -1.61445150524378 +0.307313654571772 1.85936356708407 +-0.950618378818035 -1.54908441752195 +1.21716873347759 0.845637246966362 +2.08693038672209 0.977405861020088 +-1.46714685484767 2.29135932400823 +-0.108098175376654 -0.564654212445021 +0.912437718361616 1.5384126342833 +2.54080364853144 1.45338047295809 +-1.57359937950969 0.153300724923611 +1.69789524003863 -0.950402654707432 +-0.167669508606195 -1.10605482012033 +-1.86962626501918 0.522043514996767 +-0.386042661964893 -0.943443361669779 +2.59800373762846 1.36050034686923 +-0.372303027659655 -1.15046956017613 +1.87921287491918 2.60888760909438 +0.637267287820578 -1.45456557348371 +-1.53238688409328 2.23219892382622 +1.65842662006617 1.86656299605966 +-2.34383913502097 2.04135026037693 +1.01732532680035 1.81847020611167 +-0.550810065120459 1.14504714310169 +-1.7605785690248 1.50713912025094 +1.71282342448831 -0.831966806203127 +-1.72171159461141 2.26648715138435 +0.179256163537502 1.23518867790699 +2.32903185859323 0.994956366717815 +-0.0677552297711372 -1.80267589911819 +2.37686232104897 2.48427456989884 +0.237344231456518 1.49717311561108 +0.912864185869694 0.840335384011269 +2.47731795161963 1.08654200658202 +0.573028687387705 1.71985928714275 +1.23568926751614 0.534201890230179 +0.149090945720673 1.62167240306735 +1.05418821424246 -0.932928055524826 +-0.497737046331167 -1.34645667672157 +-0.219289317727089 -1.03737621381879 +-0.504253376275301 -1.73081237450242 +1.85477861389518 0.840651277452707 +-1.90980485081673 2.06389744579792 +0.838188823312521 -0.992099154740572 +-0.662639122456312 -0.606291197240353 +0.93050741776824 1.82059735059738 +-1.58754169568419 -1.048682898283 +0.236433282494545 -1.07923072576523 +-1.81358762830496 0.582290854305029 +-0.911956358700991 -0.190612051635981 +0.410448785871267 1.09035421907902 +-0.105036098510027 1.45273659005761 +2.36958514153957 1.96999927982688 +-0.956904292106628 1.21022015437484 +0.388642773032188 -1.43329676985741 +1.7820448577404 -0.441421333700418 +0.406532373279333 0.0569574534893036 +1.40646942332387 2.46075023338199 +0.375434752553701 0.184313498437405 +-2.61266023665667 2.09099476411939 +-0.827427390962839 -0.428885065019131 +-0.76458065956831 0.154867682605982 +1.12212892994285 0.433277014642954 +0.255253061652184 -0.0663466043770313 +-1.32499407231808 0.0817271396517754 +-2.57289637997746 1.66537137702107 +1.57301988825202 1.62257969751954 +1.41108664870262 2.44251919910312 +-0.0499166510999203 -1.68686012923717 +0.577296178787947 -1.14776103571057 +0.90095916762948 -1.44755472242832 +0.731888506561518 -1.58835767582059 +2.30513063445687 1.01582721248269 +-0.891015484929085 2.17260812968016 +-1.57982877641916 1.18252457678318 
+-1.24303381517529 -1.20163783431053 +-1.94873994588852 1.93310809880495 +-0.698974385857582 -0.990444324910641 +2.42902544140816 2.38442787528038 +-1.3298335634172 -0.859347347170115 +1.26103292778134 2.12950172275305 +-0.823185008019209 0.00435494258999825 +0.888731759041548 0.512242317199707 +2.60245840623975 1.26199517771602 +0.607693165540695 1.15910234674811 +-0.761340692639351 1.04481665417552 +-2.31682937219739 2.06720039248466 +-1.78458118066192 2.66412539407611 +-0.0059196874499321 -1.46794664859772 +0.276506938040257 -0.346428137272596 +-1.4361298494041 1.46149464696646 +-0.566859245300293 -1.23728042468429 +-1.60655161365867 -0.828207962214947 +-0.358011517673731 -0.081193171441555 +-1.95992579311132 1.18862934410572 +-0.268683530390263 -1.19764831662178 +0.322577152401209 1.64992267265916 +1.06229820102453 1.68056371062994 +-0.398804951459169 0.876927878707647 +0.398539591580629 0.440097715705633 +-1.14696784690022 -0.998987708240747 +-2.31554893404245 2.47706303372979 +1.4845386967063 1.7869251742959 +-2.04644411802292 1.52019582316279 +-0.994995441287756 -0.187758475542068 +1.26691315323114 1.06539598107338 +0.89561090618372 2.09390673041344 +-2.63772695884109 1.76918847858906 +1.76934976875782 0.455108698457479 +-1.45442174002528 -0.0358322039246559 +1.15083183720708 -1.45039723813534 +1.33251246064901 1.33499390259385 +-1.28184078261256 2.10334265232086 +-0.425440400838852 0.989083636552095 +0.502656504511833 -1.51659203320742 +1.27791387587786 2.12094269692898 +-1.13161554187536 2.19003639742732 +1.41817326843739 -1.10621114820242 +-0.33081965520978 -0.739036966115236 +2.39311796799302 1.5885584205389 +2.44695172086358 1.87023855373263 +-0.35288554802537 -1.067208558321 +-1.51111628487706 2.45946690067649 +2.50345315784216 1.86981128901243 +1.62873644754291 1.57597681134939 +1.50036338344216 2.39540728181601 +-1.18898620828986 1.70663802325726 +0.611395183950663 0.744647111743689 +1.30959567055106 -1.21204398572445 +-2.09038729593158 2.50345008447766 +2.17389302328229 1.12042484804988 +1.71763078868389 1.26232958957553 +0.0299554988741875 0.141239821910858 +2.51866658404469 2.12993520125747 +2.57905870676041 2.14430571720004 +-1.52984654158354 -1.14340731501579 +1.12029530480504 -1.32255576550961 +-1.9051465280354 1.94021867960691 +1.12665386870503 -1.31771874055266 +0.364515837281942 -0.129475422203541 +0.702455375343561 -1.51655333861709 +-1.50132653489709 -0.0526272468268871 +-1.45397531986237 0.24686885997653 +1.75284482166171 1.93122955039144 +-1.33498594164848 -0.0634261667728424 +0.0934062376618385 0.784028809517622 +1.85572108253837 1.29937311634421 +-2.22731291502714 1.52680213749409 +-0.0933824330568314 1.57857684418559 +-0.979915902018547 -0.0702734887599945 +1.56477003544569 2.25071778893471 +-0.959630630910397 -1.15793731063604 +0.157959323376417 -1.34013946726918 +-0.737919811159372 -1.05918873101473 +2.38214736431837 1.9711945950985 +1.89130400493741 -0.619348458945751 +1.84728472679853 -0.381393633782864 +-0.113117955625057 0.844208762049675 +0.394897744059563 -0.723329462110996 +0.209996219724417 -1.02090506628156 +-0.546070203185081 -0.079874973744154 +-0.334173519164324 1.06267572566867 +-1.60989946499467 0.393895614892244 +0.265554334968328 0.733590610325336 +-1.56382738798857 1.48007407039404 +-1.59331288561225 -0.0772055312991142 +0.355891704559326 0.418472487479448 +1.25897985696793 0.898029260337353 +1.24142132326961 1.67154293879867 +-1.43809934332967 -1.06487549096346 +0.478734955191612 1.75573720410466 +1.14016408100724 0.527297403663397 
+1.3988200686872 1.47975045070052 +2.10610714927316 2.05240352079272 +2.56628660485148 2.14864921942353 +1.72014982625842 0.810406494885683 +1.27081394195557 2.3523774407804 +0.1376963108778 0.685361381620169 +0.650287237018347 1.8028151653707 +-0.892620451748371 -1.4409867785871 +-0.0059429258108139 0.475558150559664 +1.81071783974767 -0.809903156012297 +0.856825795024633 -0.594857472926378 +1.93927256762981 2.40168114006519 +0.389690551906824 0.720890108495951 +0.368176072835922 1.68052316084504 +-0.816919300705194 0.830580696463585 +-1.82231476902962 -0.415307972580194 +-0.0420442931354046 1.38938356563449 +-1.61892575025558 -0.578376688063145 +0.17215122282505 0.983381733298302 +0.413769725710154 1.33836034685373 +0.876108784228563 -0.717799678444862 +0.978105463087559 -1.50280061364174 +1.7042604200542 0.223923552781343 +1.35787532478571 -1.01579950377345 +1.30690402910113 0.47226694226265 +1.79082449525595 -0.245617866516113 +-1.0665135383606 -0.335841733962297 +0.434857707470655 1.74731096252799 +2.02600954473019 1.17920038476586 +0.29687238484621 0.560973059386015 +-1.50222005322576 1.08809718862176 +1.30161612853408 -0.634184326976538 +0.440865766257048 0.0700816214084625 +0.540520492941141 1.62371852993965 +-1.26385125145316 -1.16543627157807 +-0.369420785456896 0.119578745216131 +-1.63786680623889 1.16069642454386 +1.31151196360588 -0.569051165133715 +1.66202279180288 2.04398788884282 +-1.04128251969814 -0.49883459508419 +0.134063802659512 -1.69617769122124 +-0.835130412131548 -0.284255415201187 +1.36764487251639 1.97548695653677 +-1.26676482334733 -1.27638270333409 +-1.65277803689241 -0.0980307832360268 +0.393661260604858 -0.58405077829957 +-1.4036953561008 -1.13278026878834 +-0.961147509515285 0.37464489787817 +0.514735650271177 -0.436613161116838 +-0.610771678388119 1.61136303469539 +0.377628263086081 -0.157033063471317 +-1.21601602435112 -0.449282955378294 +-0.92775471508503 -1.4871588498354 +-1.20186945796013 -1.20086904242635 +1.5658906288445 1.75478062406182 +1.00770181789994 -0.0971661508083344 +-2.2265404984355 2.12134661525488 +2.31702414155006 2.60033534094691 +1.6667436696589 0.258019533008337 +2.38840765878558 2.5365157276392 +0.914122145622969 -0.159268658608198 +-0.861069142818451 0.415800984948874 +-1.99368368089199 1.22357387840748 +-2.32837038487196 1.07156844809651 +0.637874379754066 0.807610377669334 +-1.97859653457999 0.894367631524801 +-2.4730394333601 1.99911715462804 +-0.348289042711258 -1.42032632604241 +-0.427647441625595 -1.36548142507672 +-2.39289379492402 2.14361469820142 +-0.678169004619122 0.94526419043541 +1.15608689188957 1.29473279044032 +0.899646621197462 1.05531456694007 +-0.533419393002987 -0.656032115221024 +0.384348407387733 1.37230394035578 +1.43935928121209 0.170301023870707 +1.26021612435579 1.24583593383431 +-1.85961277782917 2.37738449126482 +1.79011210054159 0.586451753973961 +1.60756738483906 -1.07187079265714 +1.08104798197746 -0.600016880780458 +-2.37066113576293 1.93158183246851 +1.23831867799163 -0.654413405805826 +2.61824495717883 1.2814218364656 +-0.024267416447401 1.57576671987772 +-0.23735199868679 -1.33640214055777 +1.14103074371815 -0.569813970476389 +0.397139150649309 0.473676394671202 +1.34472202882171 -1.14792832732201 +0.20483797416091 0.54976349696517 +-0.353763535618782 -1.7415931224823 +-0.832949012517929 -0.20365297049284 +2.03665675595403 1.81587393954396 +-0.919493805617094 -1.4908574745059 +1.38112975656986 0.452760193496943 +0.537616591900587 -0.337820697575808 +-0.969094637781382 1.98362229391932 
+0.417041879147291 -0.879981510341167 +-0.894966777414083 1.00390527769923 +1.4940273091197 -0.296205826103687 +0.487503416836262 0.686869390308857 +-0.655636008828878 -0.631791513413191 +2.56971618905663 2.12401909008622 +-1.10978940874338 1.74223434180021 +0.843419671058655 -0.985003754496574 +0.278898481279612 1.76494384557009 +-0.893590785562992 1.99969716370106 +-0.697027519345284 -0.423989236354828 +1.15794115886092 -1.30676657706499 +-0.64705852791667 1.2944189235568 +-1.67346792295575 1.69811214134097 +-1.75255184993148 -0.276170670986176 +-1.2250557243824 -1.48784407973289 +-0.516881573945284 -1.64875458925962 +1.30379556119442 -0.580481871962547 +-1.47093938663602 0.618156716227531 +-1.47061355784535 2.34062577784061 +-0.0693039000034332 -0.645248431712389 +1.37899317219853 1.13952737301588 +1.40671913325787 0.499057035893202 +-1.45265284925699 0.618222132325172 +2.20767144858837 1.71245861053467 +2.21819017082453 2.42979707568884 +0.449153892695904 0.364851236343384 +1.01592741534114 -0.0211044549942017 +1.1886342652142 -0.161799240857363 +-1.32071242853999 -0.0217834264039993 +-1.52155177295208 -0.0689328275620937 +2.19729841127992 1.83405246213078 +0.18730166181922 0.30084040760994 +-2.28835191577673 1.73904897645116 +2.22624374181032 1.84274322539568 +1.70331037044525 0.0875893905758858 +-1.70143084973097 2.5313368216157 +0.689890310168266 -0.0851849727332592 +-0.195802111178637 0.156358048319817 +0.2762247659266 0.565471392124891 +-1.59997211769223 2.67919213697314 +2.28542390465736 1.88504467904568 +0.356805592775345 -1.1281246393919 +-0.292519375681877 0.664258133620024 +-1.55637502670288 0.105042163282633 +0.820241071283817 1.56687965989113 +0.923712160438299 0.498733058571815 +0.716437727212906 -1.65909921750426 +1.00222176685929 -1.36814823374152 +1.96161265298724 1.00658824294806 +0.75193715468049 0.941245455294847 +-1.73682035878301 1.27487785741687 +-0.190508358180523 -1.60139552131295 +-1.5123692676425 -1.1802624464035 +-0.0059867799282074 -1.97909510135651 +-1.60202307999134 1.65031972154975 +-2.07363090664148 2.40118822827935 +1.27074116095901 2.16915826499462 +-0.929879277944565 -0.942068055272102 +-2.19018035009503 1.12057077884674 +-0.618774920701981 0.653650093823671 +0.404862709343433 0.835357043892145 +-1.2990737259388 0.74505315348506 +-2.43307105079293 1.96347941830754 +1.69323185458779 -0.317956086248159 +-1.56701659411192 -0.0709917545318604 +0.456104882061481 0.290842853486538 +-0.154775872826576 0.710269775241613 +-1.17758930101991 -1.52483279630542 +-2.06967787817121 2.60323222354054 +0.181483678519726 0.541051026433706 +-1.58884989842772 2.59251540899277 +0.434703588485718 -1.04729443415999 +-0.111541967839003 -0.54219126328826 +1.14618043974042 -1.59237472712994 +-0.525696821510792 0.527538768947124 +-1.74552588909864 2.49139680713415 +-0.980686388909817 -1.17306500673294 +0.481017407029867 0.917503941804171 +0.642716523259878 -1.81989677995443 +-2.32214703038335 1.78755870088935 +0.368780586868525 -1.89146944880486 +0.447178512811661 -0.100253600627184 +-1.67479970306158 -1.05370793864131 +-0.126174993813038 0.317470286041498 +-2.105195119977 2.22897313535213 +1.34307173267007 -0.182876031845808 +-0.679625183343887 1.337511703372 +-2.40379395335913 1.90253117308021 +-1.24340518563986 0.454017203301191 +1.38149727135897 -1.29146923497319 +1.81227650120854 -0.796355165541172 +-1.46922712773085 1.12029860541224 +-1.8309818841517 1.24835627526045 +0.784670535475016 -1.63535778596997 +-1.89716428890824 1.81475920975208 +-0.274419616907835 
-1.46991498395801 +1.82331079989672 0.0575838498771191 +-0.20340371131897 0.659047979861498 +-0.730206459760666 1.10671497136354 +2.43966299667954 1.94304651767015 +0.804547846317291 1.36992582678795 +0.241601441055536 1.79559471085668 +-1.28922840207815 2.34182771667838 +0.497756782919168 -0.0193370915949345 +0.225997556000948 -1.76166791468859 +2.29571657627821 1.4221141114831 +1.77844596654177 -0.294086303561926 +-0.0755410753190517 1.49131742492318 +1.63189370185137 1.68161187693477 +-1.19591901823878 0.0837389454245567 +1.84714594483376 -0.0077219046652317 +1.85118484869599 0.256543219089508 +-1.38454555347562 -0.580051776021719 +0.978036448359489 2.22976576536894 +-0.919768866151571 -0.409550279378891 +-1.12735415995121 0.912390612065792 +1.0821190699935 0.761306993663311 +1.82932646945119 1.71994318068027 +0.179870285093784 1.96101005747914 +-1.2814727909863 2.0180472843349 +-2.03376595675945 1.38530955463648 +1.8627825640142 -0.440008856356144 +-0.640662614256144 1.48673802986741 +1.78899522870779 2.1273214109242 +2.26866844668984 2.384680762887 +1.54325044900179 -0.340375650674105 +1.15022388845682 1.0811296440661 +0.929324027150869 2.16113927215338 +-0.164400793612003 -1.5059422403574 +-1.1464924402535 0.583713717758656 +-1.01908338814974 -0.579786021262407 +0.0775572694838047 0.771664153784513 +0.894621223211288 -1.46210284903646 +-1.13626752793789 -1.08879090845585 +-0.81680341437459 1.1564492508769 +-0.364189777523279 0.296120367944241 +-0.849766608327627 -1.5796903707087 +1.32096638530493 2.61616801470518 +-1.19123157858849 -1.31713347136974 +2.75436544790864 1.64717119559646 +-0.495878625661135 1.3733596727252 +-0.674911215901375 -0.316060144454241 +-2.38973074778914 1.83849219605327 +0.522478874772787 -1.52490867301822 +-1.26288940384984 -0.288744561374187 +-1.34450083225965 -0.695344541221857 +0.141501717269421 -0.217302545905113 +0.616669427603483 -1.22108491137624 +-0.777845688164234 -0.492444042116404 +0.157710656523705 -1.21638035401702 +1.03862762451172 -0.684178497642279 +1.61081700026989 1.06742878258228 +-0.268880732357502 0.506149839609861 +-0.517387241125107 1.9238474406302 +-0.848382126539946 -0.869513459503651 +-0.255858853459358 -0.53773682564497 +1.01688129454851 -0.189151734113693 +0.736052617430687 1.21212351322174 +0.86587155982852 1.1599845290184 +1.47401367127895 0.380536649376154 +-1.06641667336226 0.595900729298592 +-0.45684889703989 1.53089993074536 +-1.22101172432303 1.89877569302917 +-1.10129772126675 2.50638110935688 +1.16959361359477 0.48591598123312 +-1.0980959199369 -0.415830090641975 +0.786592178046703 -1.35914334282279 +-1.78402872383595 2.06993676349521 +1.04275629669428 -1.40993741899729 +1.37661536410451 -0.80097559094429 +-1.49332259967923 1.50496188178658 +-0.746557895094156 1.52429186925292 +1.0913096293807 1.83806129544973 +0.163175974041224 1.62864415720105 +-0.566376958042383 -1.80791668593884 +0.960455130785704 -0.825466349720955 +1.24771428108215 -0.73545016720891 +1.60542532429099 0.0121268406510353 +-1.0544749982655 1.09226652979851 +-1.13931595906615 0.104356527328491 +-1.64646294340491 -0.659289825707674 +1.4406676441431 2.32224276661873 +1.7553320042789 -0.252851728349924 +-1.38836501911283 -0.694072254002094 +1.41482836380601 -0.597630608826876 +2.27287254482508 1.65427045896649 +1.19889444112778 0.361937269568443 +-1.95573752373457 1.62076980248094 +0.80565145984292 -0.665323309600353 +0.871834624558687 0.765738226473331 +0.247457403689623 -0.673286892473698 +-0.715938590466976 -1.53411284089088 +0.675626467913389 
0.439204093068838 +0.690410256385803 -0.881483074277639 +1.19501185044646 2.22326826304197 +1.29030184447765 -1.38656164333224 +1.25186143442988 1.00712873786688 +0.623483896255493 -0.796479880809784 +0.683162443339825 -0.0865858308970928 +-1.50305420532823 -0.121618505567312 +1.63056514412165 -0.436987958848476 +-1.68105244264007 2.65752547606826 +1.44420814886689 0.174672793596983 +-1.26183287799358 0.223524771630764 +0.24183164909482 -0.0854621566832066 +-1.83121830224991 2.06541942432523 +-0.454414796084166 1.45393660292029 +-1.80130152776837 2.49682863801718 +-0.247945420444012 1.02562333643436 +-1.09127113223076 -0.327345110476017 +0.841708600521088 1.2308318130672 +-0.208287503570318 0.0087767094373703 +0.477844033390284 -0.95173517614603 +2.15370114892721 1.73258008807898 +0.0277751833200455 1.07212381437421 +-0.834512189030647 -0.76938834041357 +1.4698203727603 0.343278247863054 +0.212362021207809 -0.523202694952488 +1.67474830150604 1.74200209602714 +1.30503389239311 -0.275630321353674 +0.530650362372398 0.768289875239134 +2.41535665094852 2.21368966624141 +1.68753091990948 1.62218472361565 +-1.29210621491075 -0.837977666407824 +0.55177041515708 1.14523799717426 +0.34705613180995 1.81420846283436 +-0.0945370346307755 -1.92309232801199 +-0.540610123425722 0.38219315931201 +1.77317015826702 2.62343763932586 +2.13802316784859 1.89163111895323 +0.851106848567724 -0.444340579211712 +1.92424508929253 -0.142830010503531 +0.443701487034559 1.89010849222541 +-0.982658468186855 2.27371086925268 +-0.0320519618690014 1.60271782055497 +-0.213675890117884 1.86096693202853 +0.321536790579557 -1.57691822201014 +0.85723378136754 -0.99940936639905 +-0.262773048132658 -0.201354902237654 +-1.25981993228197 0.217170022428036 +1.48457914218307 1.72874809429049 +0.258502177894115 -0.0113412290811539 +-1.70912768319249 2.31044908612967 +0.0154633894562721 0.00199912115931511 +1.64898265525699 2.36682896688581 +0.908942490816116 -1.12526040524244 +1.58299166336656 0.367870308458805 +-0.17417474463582 -0.984416011720896 +1.13834958150983 -0.531825721263885 +-1.42827661707997 -0.455232292413712 +-0.563003938645124 0.517271593213081 +-1.80291407555342 -0.375567354261875 +1.43213916942477 1.60681433975697 +-2.13291219994426 2.54592092707753 +1.2929090783 0.650164343416691 +-0.617932558059692 -0.718770559877157 +-0.0156455412507057 -1.2372669428587 +-0.154767591506243 -0.330696493387222 +1.44424206018448 2.40253547951579 +-1.17803985998034 -0.725196164101362 +0.560486778616905 -0.767852455377579 +0.914802126586437 -1.46202627941966 +-1.26151137799025 2.00018122792244 +-0.350205097347498 1.70760814845562 +1.20197219774127 0.548147562891245 +-1.22360518202186 0.549422565847635 +0.541384994983673 1.55098595097661 +-1.4013510607183 0.674092575907707 +-0.118368312716484 -1.13113866373897 +1.60313497111201 2.03603853657842 +0.79676154628396 -1.37127896770835 +1.95809760317206 2.2859320230782 +-1.30609513446689 0.0561747960746288 +0.23872734606266 -1.39615998417139 +2.64119137823582 1.92415256798267 +2.13987852632999 1.99318104982376 +1.42951334640384 1.30338386446238 +1.59520698338747 2.58379023894668 +-2.25626084953547 1.92965975776315 +-0.34362718462944 -0.944003589451313 +0.891293227672577 -0.438559390604496 +0.571002118289471 -1.2980370670557 +0.634465306997299 0.0907372608780861 +-0.167128726840019 1.10244342312217 +0.414248954504728 0.586766753345728 +-0.30742460116744 -1.35791079327464 +-1.15322747826576 -1.05186476930976 +1.53633574023843 2.67534543573856 +1.3686564899981 -0.683159340173006 +1.0381943359971 
-0.725931208580732 +1.09639014303684 -0.789861977100372 +-1.26030647009611 -0.401182398200035 +1.95929991826415 1.47806249558926 +-1.0469377450645 1.3350724875927 +-0.0478621944785118 0.0285359509289265 +-0.979338452219963 -0.635956197977066 +-0.457668136805296 0.423141490668058 +1.80792539566755 0.285613056272268 +0.118463035672903 -0.747483380138874 +-0.293262362480164 -1.79403697699308 +-0.337631452828646 1.86901216581464 +0.37471541389823 -0.477653808891773 +1.13296503946185 0.181847658008337 +-0.106226179748774 -1.68134543672204 +-0.916659902781248 1.97626540437341 +0.188993159681559 -1.63200801983476 +0.851635582745075 1.29446846619248 +1.48896795511246 0.589173801243305 +0.822797793895006 1.52733925357461 +-1.0938744880259 -0.436939097940922 +0.715277560055256 1.69737946614623 +-2.29405589401722 2.31818549707532 +-1.36509624868631 -1.38934366032481 +-0.860897339880466 1.89794284850359 +-1.03786757588387 2.21245441958308 +0.101076353341341 -0.612427271902561 +1.73764118924737 1.4272792302072 +-0.454582463949919 -1.3748379573226 +2.37827999144793 2.52193464338779 +-1.2355357632041 0.864570029079914 +-1.60134495794773 1.4587447270751 +0.895950436592102 0.619292408227921 +-1.84069871902466 1.59564329311252 +-0.509245239198208 -1.70074565708637 +-0.0622214935719967 0.584657147526741 +-0.697744198143482 1.20116669684649 +1.71100710332394 0.169275589287281 +-0.0408185347914696 0.997597727924585 +0.000594101846218109 -1.05687214806676 +0.416754264384508 -1.11622320115566 +2.34526482596993 2.01530716568232 +-1.65512707456946 1.17785263061523 +0.481835443526506 0.706240154802799 +-2.43582519888878 1.76914202794433 +1.67047576978803 -0.883224252611399 +1.08582138642669 0.354916628450155 +-0.510197006165981 -1.50215250253677 +-0.208109311759472 -0.180511828511953 +-1.34228885918856 1.858344335109 +0.921794265508652 0.391728840768337 +-0.408211532980204 -1.11864904686809 +-1.00515166297555 1.744389526546 +-1.54482211172581 0.699582684785128 +1.6146975196898 -1.1371612586081 +-1.82293125987053 -0.224524918943644 +0.331550557166338 -1.80608564987779 +-1.12601596862078 1.35905980318785 +1.93348138034344 0.135811187326908 +-0.223418284207582 -1.0509461723268 +-2.38891025260091 2.32006052136421 +1.57426309213042 0.947741135954857 +1.46656782925129 0.93629464879632 +0.874217629432678 0.294763654470444 +-0.368723478168249 1.67125923186541 +-2.10210105031729 1.84999350830913 +-1.78079517930746 2.36073924973607 +1.82538150623441 1.08430660143495 +0.192891709506512 -0.895754106342793 +-0.818230543285608 -1.25698744133115 +-1.53010163828731 0.255758211016655 +0.739143125712872 1.46919421106577 +0.366209171712399 0.103951491415501 +-0.991409942507744 0.814032923430204 +1.14933681115508 -1.01211551949382 +-2.05785125494003 0.934719275683165 +2.54551661014557 1.38713537529111 +-1.34777837991714 2.17303104698658 +-0.818767469376326 -1.35269884392619 +-1.83539244905114 2.65543504804373 +-1.72357710450888 -0.636751811951399 +-0.631461672484875 1.44296007230878 +2.04979891702533 1.33724435791373 +0.36753561347723 -1.5222073122859 +-0.873394355177879 0.215070988982916 +-0.766983050853014 -1.45958619937301 +-1.3363248705864 -0.136341247707605 +0.0425540879368782 -1.65254486352205 +1.85485055297613 2.60904681682587 +-1.23717108368874 -1.08250940218568 +1.4768900834024 1.19404719397426 +-0.502411555498838 1.66561747714877 +-1.3408033400774 -1.05847968906164 +-1.4249397367239 -0.409735418856144 +1.29835471138358 -1.24441189318895 +-1.792983148247 2.69140809029341 +-0.502302639186382 -1.12332011759281 
+0.432382110506296 -1.02115429192781 +0.000684991478919983 0.705896750092506 +-1.51542442664504 1.40594679117203 +1.50989475473762 -0.945537123829126 +1.22401609644294 0.644936252385378 +-1.57047022879124 0.0485957004129887 +2.10035035014153 2.15701800957322 +2.11222667247057 1.22868436574936 +-0.114641811698675 -0.878278937190771 +-0.0295891277492046 -0.25081367790699 +0.578598570078611 0.454996045678854 +0.62791945412755 -1.83548731729388 +1.50844958052039 0.787389662116766 +1.8427958227694 -0.477709244936705 +-0.217032413929701 -1.20778655260801 +-1.03441833332181 0.51334148645401 +-0.0766086429357529 -1.53615927696228 +0.657676853239536 0.0479584448039532 +-1.38503053411841 0.762060638517141 +-1.85945600643754 -0.0490229353308678 +-0.325347628444433 -1.27289285510778 +0.885798692703247 0.999500919133425 +0.181305386126041 0.553340043872595 +0.84357076138258 1.55880755931139 +-1.66228049993515 0.614380359649658 +0.163053505122662 0.637030217796564 +-0.0695256404578686 -0.802101269364357 +-0.663438461720943 1.69948781654239 +-0.500264193862677 -1.77401711046696 +-1.88397874683142 0.401685543358326 +1.78488719090819 2.37200390174985 +2.01893969997764 2.6637995839119 +0.0834745429456234 -0.406026910990477 +2.06964873149991 1.49931880831718 +0.775553602725267 1.2126682177186 +0.758092418313026 0.903564926236868 +-1.9801014252007 2.01315024495125 +-1.12002081423998 1.24265347793698 +1.88625931367278 2.45326886326075 +-2.64901087060571 1.81840563565493 +1.57735834270716 1.07454602792859 +-0.525874726474285 0.582716174423695 +1.79638411477208 0.0766831561923027 +-1.37646645307541 -0.336351349949837 +-1.33035645261407 -0.836080744862556 +-0.845957413315773 -0.783686581999063 +1.20292070135474 0.228619664907455 +0.842589635401964 -1.69082416221499 +2.36004054918885 1.19775392860174 +-2.60571858659387 1.47021387144923 +1.13436972349882 -1.36533337831497 +0.148266825824976 1.71622746065259 +0.82295473664999 -1.53533979132771 +0.773567821830511 0.325846962630749 +-2.35758912190795 2.41412544995546 +2.69990140199661 1.99405892938375 +0.970847081393003 -0.21447953954339 +1.89144602417946 1.75127951800823 +-1.9015325717628 -0.508084543049335 +1.66411156579852 2.02408622950315 +1.36255263164639 1.75476237013936 +-1.61002407595515 -0.845608804374933 +1.75395713374019 0.677158292382956 +-1.35594457760453 0.89791002869606 +-0.353782773017883 0.333531498908997 +-1.97033133357763 2.05879325419664 +-0.714930210262537 -1.45141080394387 +2.50466869771481 2.14408287778497 +-0.166085954755545 -0.103587731719017 +-1.34426841884851 -0.848168358206749 +0.916199486702681 0.596646778285503 +2.39365001395345 2.43589664250612 +0.506383322179317 0.485752958804369 +0.19283527508378 -0.625017423182726 +-1.16504696384072 -0.565386820584536 +-1.39224032685161 -0.858566083014011 +0.183774087578058 -0.775317411869764 +-0.721468590199947 1.56965611875057 +1.08868273720145 2.20188667997718 +-1.84448461607099 2.32056867703795 +0.594437785446644 -0.715535867959261 +-1.91090809553862 0.440925307571888 +0.324686162173748 0.807314418256283 +-1.17798274382949 -1.24971582368016 +0.046799685806036 -0.31355968490243 +1.87810526788235 0.437606986612082 +-0.00740975141525269 -0.169487126171589 +1.00447832792997 0.533286243677139 +-2.35611428320408 1.63007549196482 +0.633079010993242 -1.07237495109439 +2.12027174606919 1.76099521294236 +1.35369170457125 -1.03646264597774 +-2.34024313092232 1.78329748660326 +-0.768233880400658 0.263621442019939 +-2.70615360885859 2.10202656686306 +1.7639335244894 1.99786217883229 +-0.782537758350372 
0.218737117946148 +-1.42106088995934 1.01979983225465 +-0.500244483351707 0.433337293565273 +0.102956507354975 -1.47647266089916 +-1.8329651877284 1.44061987474561 +2.23832821846008 0.909623149782419 +-0.859752740710974 -0.671882249414921 +1.31282724067569 -1.34941808879375 +1.78631226345897 -0.559286419302225 +0.207041025161743 0.897101238369942 +2.03144226595759 0.962347507476807 +0.0603122934699059 0.216611698269844 +0.0844604037702084 0.90239555388689 +-0.625112693756819 -0.664175786077976 +-1.5193378739059 0.204106543213129 +-0.864296562969685 0.63379043713212 +-2.13650075346231 1.20278086513281 +-0.0691469721496105 -1.63604700192809 +0.136363103985786 -0.910245444625616 +1.27465800195932 1.35598650202155 +-1.70769249275327 0.443525034934282 +-1.35124723985791 -0.0308694392442703 +1.90003884583712 2.33107142150402 +2.50229515880346 2.21366404369473 +-0.160902034491301 1.7154528349638 +1.71184361353517 1.08326069265604 +-1.27977304533124 -0.391831237822771 +1.47848774120212 0.13474589958787 +-0.297257453203201 0.601883351802826 +0.915728233754635 0.460545670241117 +1.42815625295043 0.546974163502455 +-0.0757043920457363 -0.551960323005915 +-1.38948917761445 -1.03249570727348 +1.29492139071226 -1.36065739393234 +0.535127185285091 1.80079112946987 +1.5804204531014 1.23143979534507 +0.317147668451071 -0.271339043974876 +-0.776994321495295 1.31635238230228 +1.65657556802034 0.769721794873476 +2.27375413477421 0.915496960282326 +-1.14509061723948 0.384536538273096 +1.8531792126596 2.37758757174015 +1.07925893738866 0.805695291608572 +-0.383929435163736 1.17128789052367 +1.33880440890789 -0.271188668906689 +1.65889555215836 2.71798944100738 +0.163553394377232 -0.767871785908937 +0.417434763163328 0.447188198566437 +0.400044910609722 1.44798548147082 +1.41010418906808 0.942833129316568 +1.37431006878614 2.33782985806465 +0.645901877433062 -1.48460501432419 +0.192493971437216 1.93734153732657 +0.333872176706791 0.152766354382038 +1.03373745456338 -1.33289316669106 +1.57668508589268 -1.15167524665594 +1.52696630358696 2.5977040566504 +0.637840632349253 -0.624298073351383 +-1.2953502908349 0.000577125698328018 +0.496900163590908 1.92494321614504 +-0.849070373922586 1.38233757764101 +2.69928949698806 1.97913662344217 +-0.181594479829073 -0.941586866974831 +1.34682164341211 -1.38756509870291 +-0.717227712273598 -1.17854425683618 +0.955796178430319 2.23549652472138 +0.459327977150679 -1.67023513093591 +0.413522705435753 1.22080536186695 +1.28486291319132 1.16106786578894 +-0.18416041135788 -1.98751051351428 +-1.84865221008658 2.43312085792422 +0.228503677994013 1.54455609247088 +-0.45500285923481 1.73779316619039 +1.56091017276049 0.741249196231365 +1.95727346464992 0.806566406041384 +0.238821174949408 -1.8140957057476 +-0.929044306278229 -0.889031019061804 +0.369125876575708 1.48615284636617 +-1.43741977959871 -1.14397687464952 +0.118279982358217 -0.360387578606606 +-0.642163813114166 0.486655037850142 +1.54515917599201 2.52141163125634 +0.480530686676502 0.193359263241291 +-1.82095745578408 -0.0504979379475117 +0.270392116159201 0.0581371635198593 +0.129429131746292 1.86541238054633 +-0.629300452768803 1.7857557348907 +0.54730286821723 -0.331462059170008 +1.83647996559739 2.15137536078691 +-2.54291970655322 2.09413921460509 +1.5993968732655 -1.13609224185348 +-1.11016167700291 -0.278833705931902 +1.54439233243465 1.26195622235537 +-1.07266439497471 -1.58680047467351 +0.996796909719706 0.556277628988028 +2.0337962731719 1.85696919634938 +1.12743551284075 -1.35644496232271 +-1.55063720792532 
1.5792670994997 +-1.30126436054707 -1.31037767603993 +-1.55780618265271 0.946207471191883 +-0.522567883133888 1.22577122598886 +0.0624348893761635 0.926413737237453 +0.473900612443686 1.00256087630987 +1.15870916843414 1.84574948996305 +-1.97743318602443 1.05625651031733 +1.29697459563613 0.803650684654713 +0.596369192004204 1.40446190908551 +2.31139377132058 2.26159903779626 +1.29225946217775 0.5239642187953 +-0.142846565693617 -1.38669716566801 +-2.16436718404293 2.40477615222335 +1.67539570480585 0.041228786110878 +-2.06997912749648 2.07815561816096 +-1.6768424063921 0.074662771075964 +0.904149137437344 0.225554373115301 +2.42486250773072 1.82107873260975 +2.17270364239812 1.62208722904325 +1.49528807774186 1.11626117676497 +2.6956540197134 2.05512928590178 +1.35996893420815 -0.722513273358345 +1.18013816326857 -0.124725565314293 +2.43337002024055 1.05317191779613 +-2.15844300761819 2.17071307823062 +0.526352647691965 -0.187046229839325 +0.988592583686113 0.495474699884653 +-1.82842166349292 1.13256015256047 +-0.737802915275097 -1.57507827505469 +1.09924771636724 0.385991357266903 +1.54007135331631 2.14663383737206 +0.620115380734205 1.68537382408977 +1.60450657457113 -0.793278552591801 +0.193754300475121 -1.12601727619767 +1.28736381977797 -0.10853148624301 +0.672578863799572 1.13141871616244 +-1.84555500000715 0.533726051449776 +-0.552847791463137 0.674683775752783 +-1.7334644459188 -0.160763598978519 +0.739871196448803 1.15217412635684 +0.309304997324944 0.482448395341635 +-1.2238446213305 0.0672205984592438 +-1.38674059510231 -0.337230660021305 +-1.23239151015878 2.41451795399189 +1.543267801404 2.00898000597954 +1.42759009450674 1.71849688142538 +1.45801543816924 1.31180621683598 +-0.138838678598404 -0.923625487834215 +2.01231294497848 2.56081679835916 +-1.45885872468352 -0.392570741474628 +0.472467377781868 -1.37513014301658 +-0.5506277307868 -0.877700522542 +0.524381302297115 1.05331348627806 +-0.309098560363054 0.837738782167435 +1.16114041954279 0.703112848103046 +-1.25026923790574 -0.0689548999071121 +1.57363194972277 -0.411217585206032 +1.26060214266181 -1.11961471661925 +1.54378151521087 -0.564829245209694 +0.220768131315708 0.0955229960381985 +0.617549672722816 1.72634589672089 +-1.28840918466449 0.666421756148338 +2.07160582020879 1.39714492112398 +2.06161312386394 1.9285535030067 +1.55335453897715 0.185300469398499 +0.556440878659487 0.3132289275527 +-0.267988566309214 -0.276467196643353 +-0.857039120048285 -0.312185782939196 +0.60365379974246 1.73055183142424 +-0.600905742496252 1.73727433755994 +-2.22085829824209 2.49030419439077 +2.0025742277503 2.2722509726882 +0.912726990878582 -0.901390854269266 +0.526542071253061 1.46762549504638 +-1.56710520014167 1.3850760050118 +1.31415278464556 -1.17722496762872 +-0.0234162397682667 0.813470516353846 +-1.64692474156618 -1.00734670832753 +-1.12948875874281 2.3073160611093 +0.819665472954512 1.06937425211072 +1.51156301796436 2.45118643715978 +-1.14800245314837 2.46159310266376 +-1.58257056772709 -0.795325171202421 +-1.38152514025569 1.82629799097776 +-1.39955862611532 0.726010445505381 +1.26290082931519 2.50398024171591 +1.01945924386382 0.586650125682354 +1.62298318371177 -0.704238954931498 +-0.891975551843643 0.126301273703575 +-1.56214633956552 1.25332224741578 +0.470245085656643 -1.85216283798218 +1.14320358261466 1.57726724073291 +-0.753320436924696 1.82469775900245 +-0.258821256458759 0.912540100514889 +-1.71812358126044 -0.651166457682848 +0.24786963686347 1.52807755768299 +-0.0607943758368492 -1.36554244533181 
+-1.50573675334454 -0.039066955447197 +1.17530309036374 2.10235041379929 +0.432529665529728 -0.380576658993959 +-0.887751508504152 -0.998901877552271 +0.894859157502651 -1.56122221052647 +1.16085424274206 0.588368192315102 +-2.57004598155618 1.60105132311583 +0.192781932651997 0.729219269007444 +-0.867646515369415 0.811794247478247 +-0.285022683441639 -1.43970903009176 +-0.405023757368326 0.785119000822306 +1.34468207508326 0.725539922714233 +1.4304340928793 2.32545579597354 +-0.422479424625635 -0.630041807889938 +-0.880339130759239 1.17446187511086 +-2.03695972636342 2.15664556249976 +-0.941390108317137 2.15713258087635 +-1.4999625980854 1.27709802612662 +2.42054955288768 2.3462987691164 +-1.6934056840837 2.57817444577813 +-0.999901942908764 1.06301248818636 +-2.75270127132535 1.75415220484138 +-0.344744421541691 -1.01087325811386 +1.58604636788368 0.208456698805094 +-0.290867336094379 1.24938291311264 +0.795580208301544 1.1538221500814 +-1.37425868585706 0.0585001297295094 +1.52873606607318 -0.109005521982908 +-1.79858132824302 -0.79760917276144 +-1.46674694120884 1.52614406123757 +-1.29856934770942 -0.362010132521391 +2.14565329626203 2.58922691270709 +-0.686767194420099 -0.572765983641148 +-0.349066130816936 0.453278236091137 +1.67376169189811 0.345757860690355 +1.89748713001609 0.0865381956100464 +-1.13426848128438 -0.759211603552103 +1.83328438177705 1.32343842089176 +1.03092344477773 -0.901608690619469 +-0.712751433253288 1.45955435559154 +-0.453286070376635 1.79938955232501 +-1.80620896443725 0.0547503978013992 +1.29195903241634 0.498561639338732 +0.248261626809835 -1.7378050275147 +-1.92154499143362 0.309081248939037 +1.49247858673334 -0.40906560793519 +1.91879709810019 1.80468394234776 +-2.51113039255142 2.062694568187 +-0.188645243644714 0.00270621851086617 +-0.902574919164181 -0.617682345211506 +0.28911005333066 -1.39590605720878 +-1.76729455217719 -0.874644581228495 +0.353449273854494 0.81724938750267 +1.16076809540391 1.55491418018937 +1.70490362495184 -0.892090190201998 +0.595857802778482 -0.176700994372368 +0.348251268267632 0.805930428206921 +1.97550889104605 1.07250170782208 +-0.567523002624512 0.810181748121977 +0.457383442670107 1.40968043357134 +2.10828642547131 1.25881857797503 +-0.976813457906246 1.80108971893787 +-2.13697197660804 2.6914309039712 +0.178013414144516 0.768708307296038 +0.276729945093393 -1.74430857598782 +-1.02707981318235 0.938040785491467 +-1.5563670694828 1.82238169386983 +1.8645891584456 2.14493918418884 +-1.39525942504406 -0.55048106238246 +1.46249288320541 -0.215698108077049 +0.0172556489706039 0.164285331964493 +-1.76832085102797 2.33161537721753 +1.29188754037023 0.560280419886112 +1.20506551116705 -0.513609863817692 +0.836268074810505 -0.857397109270096 +-0.672391526401043 -0.702607113867998 +1.17303144931793 1.60220643877983 +1.0193308442831 1.57729441300035 +0.240476354956627 -0.669399440288544 +0.121660884469748 1.6784554682672 +2.12028596177697 2.55389507114887 +-0.216054197400808 -1.33190174400806 +-0.105383038520813 1.69847087189555 +0.879361864179373 -0.837415795773268 +1.12777050212026 -0.86396924033761 +1.01943119987845 -0.705659311264753 +-0.65585470199585 -0.152932234108448 +2.2243849337101 1.8426287509501 +0.871305737644434 -1.37090126052499 +-0.452779460698366 1.4170678332448 +-1.13624343648553 0.236865434795618 +-1.80003566294909 -0.832085572183132 +2.13371417298913 2.543297175318 +-0.436683010309935 -1.16266361624002 +-1.96620646864176 1.33866954594851 +-1.54784697666764 2.38246389105916 +-1.99597705900669 1.61890365555882 
+-1.35618937015533 -0.228576887398958 +-1.5302503220737 2.64416148886085 +0.236986327916384 -1.46399727463722 +-1.57630292326212 1.91907807439566 +2.15108158066869 1.03834665194154 +1.38579376414418 2.18519030883908 +-0.660367771983147 -1.60721273720264 +-2.44690085947514 1.47499300166965 +-1.73678717017174 1.06475897505879 +0.155773386359215 1.5806176699698 +0.176340591162443 1.38583674654365 +-1.91386594250798 2.64073124155402 +1.33377246931195 2.63890229538083 +-0.306396335363388 1.66813609004021 +-1.54609319195151 0.227354243397713 +-0.334848511964083 1.1717737801373 +0.0348297283053398 -1.95568075031042 +-1.47089136019349 2.56193177774549 +1.79430291801691 -0.615821070969105 +0.28622180223465 -1.88415913283825 +-1.38039598241448 -1.35253041237593 +2.10032783821225 2.5386090837419 +-0.233294989913702 1.94860342890024 +1.0803833194077 -1.22457321733236 +0.266824845224619 -0.646207042038441 +1.52721042558551 0.900187227874994 +-1.14078261703253 -0.514220107346773 +-1.00353052839637 -0.788588907569647 +-0.847953233867884 1.89588890597224 +-0.0446872599422932 0.572611678391695 +-1.67258572950959 -0.611500553786755 +-1.82663059234619 1.70049628615379 +1.24112888798118 0.270463190972805 +0.396462831646204 -1.70910073816776 +1.34976113587618 0.646636053919792 +-1.54944146797061 0.25681472197175 +-2.38186632096767 1.92515519633889 +-0.876689985394478 0.132299859076738 +-0.993009682744741 -1.11728988587856 +0.84791087731719 1.10787640884519 +0.99237011000514 0.231921568512917 +1.47963821515441 0.207533050328493 +-0.326642543077469 0.30014082044363 +1.21101628988981 2.36444960907102 +-0.348244100809097 -1.40881530568004 +0.177146334201097 0.875028502196074 +-1.02900411933661 2.02536120638251 +1.65110589936376 -0.955298408865929 +-0.0766108110547066 -0.649841103702784 +0.645113609731197 -0.598199956119061 +0.977368883788586 1.47550420463085 +1.75873035565019 -0.518412403762341 +-0.494701460003853 1.02133389189839 +-0.83024238795042 0.095396276563406 +1.68983100727201 1.56964970752597 +-1.17462633922696 -0.901320993900299 +-1.09865006059408 2.15848043933511 +-1.46937080100179 2.44062557443976 +-1.41455505043268 0.701091643422842 +-1.41699461638927 0.258086051791906 +1.29549089446664 -0.727072671055794 +-0.443223640322685 1.75493870303035 +0.917197152972221 -1.3046305142343 +1.99260730296373 0.870588280260563 +-0.625757165253162 -0.708540394902229 +-1.8633576259017 0.296224597841501 +-1.40130716562271 -0.609616290777922 +-2.47962906956673 1.74237286671996 +-2.15242037549615 2.33682316541672 +0.716623149812222 0.739150479435921 +-1.76225874572992 0.694887049496174 +-0.757928807288408 0.0979694910347462 +-0.711798120290041 -0.838762868195772 +1.45656773820519 2.09530458971858 +1.59066112339497 0.00282280892133713 +-1.15445387363434 -1.51595310121775 +0.603920113295317 0.380206286907196 +-1.85528212040663 -0.225229486823082 +0.0863387174904346 -1.59469087421894 +0.674937326461077 -0.560202557593584 +0.847286127507687 -1.56787387281656 +0.42176741361618 0.713025361299515 +1.43078879639506 -0.653936021029949 +0.331854652613401 -1.48195277154446 +-0.62209964171052 -0.0983878746628761 +-0.511545103043318 -1.06932408362627 +1.10589401423931 0.938678473234177 +2.57016348466277 1.73850410804152 +-1.67193328216672 2.26057479903102 +0.306364733725786 -1.58486895263195 +-1.17507544904947 1.70745014771819 +0.971020981669426 -0.6239258274436 +0.192015338689089 -1.34571276977658 +1.73203687742352 1.92948113381863 +0.515854239463806 0.864282224327326 +1.84539218246937 1.03234384581447 +-0.631268195807934 
0.988143004477024 +-1.77206207066774 -0.530105005949736 +0.819924809038639 -0.495194520801306 +1.04357648640871 -1.12385459989309 +-2.02596700564027 2.10357126221061 +-1.27076816186309 1.34280800446868 +-1.33146039769053 -0.435503788292408 +0.0122264958918095 -0.435146845877171 +-0.0170656219124794 1.14172022789717 +-0.487819124013186 0.666167706251144 +-1.90408866852522 2.00657833740115 +0.722516383975744 0.36966910213232 +-0.882153369486332 -1.55249791219831 +-0.385498579591513 1.10002205148339 +-1.83205460384488 1.97577792778611 +-2.12112115696073 0.971886366605759 +-2.46511382237077 2.00173475965858 +1.52523473277688 2.52045095339417 +-1.2907173037529 -1.18366203084588 +-0.793888848274946 -0.198477130383253 +0.0865577384829521 -0.691697135567665 +-0.0455331355333328 1.38744111731648 +-2.65630106627941 1.58452495187521 +-0.872619681060314 1.23668154701591 +-1.24621826037765 -0.0252168513834476 +-0.934675876051188 1.63903185725212 +1.2465153709054 1.89280044287443 +-2.05372881516814 2.30371368303895 +2.55404566973448 1.39872566610575 +-0.0412734970450401 -1.03287495672703 +-2.00245136767626 1.67470711842179 +2.14595423638821 1.4337869361043 +-2.43435449898243 1.96776534244418 +-1.2085691653192 2.25083231180906 +1.70648610964417 -0.780565265566111 +-1.92906713113189 1.04836709797382 +0.976215414702892 -0.862506911158562 +2.32378655672073 1.27071339264512 +-0.365384545177221 1.16239683330059 +0.709454242140055 -1.15748507156968 +1.15443689748645 2.43953256681561 +0.509163312613964 -0.605195727199316 +2.2197281382978 2.18260639160872 +-1.0564346536994 1.79333805292845 +2.64304772764444 1.37915468588471 +-1.75191098079085 2.06154052913189 +1.0550943352282 -1.08878782019019 +2.22663734853268 2.64687102288008 +-0.942742098122835 1.59290903806686 +-0.515650451183319 0.331837579607964 +-0.359231993556023 0.48425230383873 +0.448253083974123 -1.60551860183477 +0.229414697736502 -0.689525261521339 +1.01882464811206 2.13020101934671 +1.52239710465074 2.6270376779139 +1.92789643630385 2.14849961549044 +-1.37203025445342 1.36140811815858 +-1.62888561934233 2.16896298527718 +1.30226709321141 -0.497972179204226 +0.494547557085752 -0.279083281755447 +0.923104893416166 -0.707898907363415 +1.57104682177305 2.74808261170983 +2.70458173006773 1.66419994831085 +-1.98663723468781 1.927869040519 +-1.24048339575529 1.04298413172364 +-1.13706537336111 0.601546764373779 +-0.559760667383671 0.76318135112524 +-2.24971466138959 1.69080771505833 +-2.15982699021697 0.981810662895441 +2.02424509823322 2.61124873161316 +1.69516575708985 0.698393248021603 +1.29270146414638 -1.48398783057928 +0.929037004709244 1.32668540254235 +0.260312657803297 -0.435005486011505 +-1.30148561298847 2.16897643357515 +1.62944799661636 1.49327427148819 +-0.664462145417929 -1.194022718817 +-2.32164691016078 2.26212844252586 +-1.24445926770568 -0.843641199171543 +-0.707790408283472 0.596810761839151 +1.4433654434979 -1.03927864134312 +-2.7105621099472 1.72086683288217 +-1.75494730100036 0.972798377275467 +-1.73170902207494 0.989878576248884 +-0.416060142219067 1.95349635928869 +-0.919867463409901 -0.510842300951481 +0.676331158727407 -1.70448723062873 +0.291951291263103 -0.30884100869298 +-1.30346575379372 -1.2417502515018 +-0.845484651625156 -1.03739028424025 +1.53564159944654 -0.486881125718355 +0.372276660054922 1.1635596267879 +1.8308362737298 2.57942580804229 +0.348529424518347 0.378941960632801 +-1.37882426381111 0.0167714990675449 +0.449067432433367 -1.48813380300999 +1.10908287018538 -1.64196319133043 +-1.53287832066417 
1.17348574474454 +-0.580648139119148 -0.771738428622484 +-1.02461995556951 -1.56499572098255 +1.05632101371884 0.607365362346172 +1.12979988753796 1.44015334546566 +-0.851487845182419 2.00282349810004 +0.881357870995998 -0.00132635980844498 +0.286965344101191 0.85690276697278 +1.26578693464398 -0.398243397474289 +-0.0255469083786011 0.0335003957152367 +0.589740596711636 0.654264952987432 +1.14362880960107 -1.18496731668711 +1.38613891601562 2.59548110514879 +-0.955218181014061 0.159039359539747 +1.59112694486976 2.37507246434689 +-1.45307355746627 2.4564161002636 +-1.17920272052288 1.75684698671103 +-0.634684719145298 -1.4504374936223 +-1.45564009994268 0.725729085505009 +0.791943475604057 0.252323735505342 +-0.136307075619698 0.63487222418189 +0.749371539801359 0.870415315032005 +1.5953791514039 2.51618684083223 +-1.78271155059338 -0.280802559107542 +-2.56371947005391 1.3695773370564 +0.155722618103027 0.789639081805944 +0.178133107721806 -0.441084422171116 +1.34758930280805 0.437792602926493 +0.211157578974962 -0.351034052670002 +0.3144910633564 -1.15529216080904 +0.202232498675585 1.80045663937926 +0.141278401017189 1.00751872360706 +1.55902537703514 0.819741029292345 +-0.304598957300186 1.72886266931891 +-1.95056860148907 -0.0311811678111553 +-1.76587482169271 1.19900687038898 +-0.644126247614622 -1.2519394941628 +0.0808276049792767 1.85443824529648 +-1.88038260117173 -0.397977069020271 +-2.11819876730442 2.24127789959311 +-1.65277890861034 -0.739794079214334 +0.989715095609426 -0.500991240143776 +0.193057909607887 -1.71221650764346 +0.0331682376563549 1.43859615176916 +-1.76934826001525 -0.307277213782072 +0.284778695553541 0.660555500537157 +-1.87049125507474 0.231289204210043 +0.121066201478243 1.83292461186647 +-0.978937670588493 1.27014483883977 +1.03515638783574 -1.22348323836923 +2.02493131905794 1.24031110107899 +-1.18239967897534 0.796192180365324 +1.53600875660777 2.57252229005098 +2.27681195363402 1.56778157502413 +-0.699734024703503 0.676739640533924 +-1.3090114556253 -1.46504875272512 +-2.55501817539334 2.11378353461623 +-0.613506603986025 0.9585205540061 +1.92258638516068 2.17517499998212 +-0.714522164314985 -0.988747969269753 +1.82831159979105 1.79368776455522 +-1.10645763576031 2.04739760607481 +2.50890376418829 2.34489721059799 +-1.28032708913088 -0.635631080716848 +1.68174647912383 0.335486825555563 +-1.80763270705938 -0.298963338136673 +0.528042804449797 -0.999786503612995 +2.33020516112447 1.58979452028871 +-1.71055898442864 2.32244940474629 +-0.856102030724287 0.891209255903959 +-0.326126039028168 -1.22918394207954 +1.30368058383465 -1.49581833183765 +0.728180281817913 1.22306437790394 +-0.818217374384403 -0.729386974126101 +-1.98837438225746 2.41348496079445 +1.79457629472017 2.33774131163955 +0.171433702111244 0.896550226956606 +-0.469037272036076 1.2277662307024 +1.27484050765634 1.97433346509933 +0.34203714504838 0.875585418194532 +-0.848352544009686 0.39140272513032 +-1.32793113589287 -0.0236349143087864 +1.80636756494641 0.0231401883065701 +0.82909968495369 0.63939107209444 +2.22132615372539 1.37761812284589 +1.56609800830483 0.48029425367713 +0.91469294205308 -1.3351822309196 +-1.0851996243 1.32992600649595 +-1.45724626630545 -0.226342245936394 +-0.597901996225119 -1.81736213713884 +-1.79976405575871 0.652198445051908 +-1.85505126044154 0.554871771484613 +1.96662965416908 1.5572167262435 +-0.555009197443724 -0.477243240922689 +-0.589593041688204 -1.80936207994819 +0.906489491462708 0.273079890757799 +-1.1864638030529 0.680250678211451 +-1.28476342558861 
1.08014846593142 +1.36212438717484 2.02536749094725 +0.806853417307138 1.01335589960217 +1.48756861314178 -0.485384546220303 +2.2400438003242 2.04979971051216 +1.01842350885272 -0.852658830583096 +-2.60844051092863 1.95480117574334 +-1.32150540873408 1.39983399957418 +0.14582196995616 -0.0290753319859505 +-1.5202842541039 2.47899068892002 +-2.07272784039378 1.78517073392868 +-1.07362318411469 -0.959627367556095 +-1.38324384391308 2.09849534928799 +-1.1832100301981 -1.42560493946075 +0.233784440904856 0.371937461197376 +2.73589451983571 2.00480400770903 +1.317209135741 2.19323244690895 +-0.161241948604584 0.671335317194462 +0.087350994348526 -0.419967547059059 +1.15877569466829 0.446815136820078 +-1.12582758814096 1.81553603336215 +1.11201325058937 1.99452042579651e-05 +-1.31384374573827 0.179069634526968 +-0.941334027796984 0.587311770766973 +1.3219775184989 0.811321217566729 +0.440432347357273 -0.172150015830994 +0.256781093776226 0.820552468299866 +1.69355700910091 1.28916246443987 +-1.46420361474156 -1.22710915654898 +2.60700872540474 2.02061128616333 +0.773373793810606 -1.68216570466757 +0.132056105881929 -1.18330582231283 +1.71311828121543 -0.128314293920994 +1.79935869947076 0.856114652007818 +-0.458309482783079 -1.70375842601061 +-0.802330486476421 0.511919766664505 +-1.84868917986751 1.20658145472407 +-2.22521344944835 1.01516569778323 +0.30754940956831 -1.82038067281246 +0.444598820060492 0.315748747438192 +-0.138920392841101 -0.73116747289896 +1.12393140792847 0.820413455367088 +0.823003008961678 0.21518399566412 +-2.29018425196409 1.26853665709496 +-0.414806071668863 1.84696693345904 +-1.37511441856623 -0.126306019723415 +-0.948101066052914 2.09563609212637 +-0.978493999689817 1.7349668815732 +0.404231369495392 1.77126403898001 +-0.283522885292768 1.24712531641126 +-2.43691884726286 1.57182731479406 +-0.926123578101397 0.751241080462933 +-1.48163148760796 1.98432969301939 +-1.13180942833424 2.46510558947921 +0.731281854212284 -0.902782823890448 +1.90465720742941 -0.276375476270914 +-1.23226910457015 2.28208062052727 +-1.80330476537347 2.05747677385807 +-1.11091822385788 -0.604769330471754 +-0.28366507589817 -0.987368952482939 +-1.93307116627693 0.408875796943903 +-2.2218388132751 1.92737624421716 +1.25384836643934 0.0592619329690933 +1.06631552800536 -0.029660452157259 +1.20426149666309 1.14117193594575 +1.08679255470634 0.168143969029188 +-0.450238239020109 -1.06667506694794 +-0.231419384479523 -1.97931176424026 +-1.14300668239594 0.956109337508678 +1.3381629101932 2.58639293164015 +0.209050945937634 -1.42569424584508 +0.407158579677343 -0.822405245155096 +-0.960367280989885 -0.74620982632041 +0.160901118069887 1.10187945514917 +-0.857731752097607 -1.37392520904541 +-0.188565868884325 0.373512003570795 +1.18416328728199 0.853956583887339 +-0.249946523457766 -0.584791827946901 +-0.385774146765471 0.597081907093525 +0.987276844680309 1.64686994627118 +-0.422510612756014 -1.57460716366768 +1.73169268295169 1.14694384485483 +2.29878545925021 2.51595646515489 +-0.629363067448139 1.32364158332348 +1.92346667125821 2.09715102612972 +2.56113024801016 1.32903331145644 +-1.21563898772001 -1.4252568744123 +2.45645239949226 2.48565460368991 +0.861103300005198 -0.136391386389732 +0.731179151684046 -1.22035099565983 +-1.02301658689976 -1.63199983164668 +0.648143272846937 1.78779175132513 +-2.47236002609134 1.37413542345166 +1.24581446871161 -1.5004388615489 +-1.71195172145963 1.69993434101343 +-0.779162708669901 -0.137051682919264 +1.21662604436278 1.59702369570732 +-1.28909985348582 
1.32249707728624 +1.38165430724621 -0.411507800221443 +-0.923773985356092 1.88411480560899 +0.1241051889956 -0.493228070437908 +0.730998080223799 -1.50735555961728 +-1.42953781783581 -0.907238744199276 +-0.134450435638428 1.23529382795095 +0.960632476955652 1.58048288151622 +0.359725594520569 -0.430516012012959 +-1.91811810061336 1.53990317881107 +1.92651942744851 1.41985943168402 +-0.00873232632875443 -0.0340142957866192 +0.175955947488546 1.34597968682647 +0.10726448521018 -1.01392716169357 +-1.81101080775261 0.654233556240797 +-1.04033045843244 0.304074335843325 +0.864867649972439 1.98768180608749 +-0.177299074828625 1.78910635784268 +-0.64570339769125 -0.680313561111689 +0.433077111840248 0.734470143914223 +0.170852396637201 -0.871793054044247 +2.5143909715116 1.62458437681198 +2.07705876231194 1.59443159401417 +0.548106957226992 -0.841678600758314 +-1.44460164383054 -0.304510686546564 +0.135503079742193 1.47673847898841 +-1.61480413749814 0.280255608260632 +-1.80813021957874 -0.288020145148039 +-0.0285997316241264 0.626680534332991 +0.456993032246828 -0.381515610963106 +0.698564808815718 0.857706300914288 +-0.622973620891571 -1.83582999557257 +-2.07506953552365 1.55360172688961 +1.67766311764717 1.7504082210362 +1.76012835651636 -0.763470739126205 +1.39740008115768 -0.963811445981264 +1.00242787972093 1.46014507487416 +-0.0272649712860584 -1.09671343490481 +0.733004815876484 -0.911984823644161 +-0.74523426219821 0.130224011838436 +1.09361341968179 -0.710207458585501 +-1.33077286183834 1.31538712233305 +-0.634068753570318 -0.751738764345646 +-1.09396594762802 -0.45988816395402 +-0.0770794972777367 -0.874907784163952 +0.299477178603411 0.296096336096525 +-0.593131188303232 -1.54000959917903 +-1.32823869585991 1.97884861007333 +2.33563550189137 1.18871821463108 +-0.895966798067093 -0.815455183386803 +1.97976575791836 0.898480366915464 +-0.313396662473679 1.30348579213023 +1.86884000897408 2.2677069157362 +0.469547811895609 -1.03560202941298 +2.15516727417707 1.34409138932824 +-0.227274168282747 0.654883060604334 +1.83922416716814 0.801765721291304 +1.76696893200278 -0.882069334387779 +-2.34859005361795 1.29161311686039 +1.31421870738268 2.0156329497695 +1.64423055946827 -0.509732574224472 +1.92399939894676 0.813092350959778 +-1.60881197080016 1.50658148899674 +-1.83418660610914 -0.101499047130346 +-0.818953473120928 -1.7067817337811 +-1.68366635218263 1.95339458808303 +-0.971033543348312 1.6464666724205 +-0.836421322077513 -1.12989931553602 +0.781277891248465 -1.33023568987846 +0.656126130372286 -1.15042977780104 +-0.000430651009082794 1.4903018027544 +-0.0378729701042175 0.958509918302298 +1.71840077638626 0.65068119764328 +-1.27917374297976 0.484373059123755 +0.409755021333694 0.806063029915094 +-0.113932311534882 0.928813423961401 +-0.957073748111725 -0.968538403511047 +0.438218507915735 0.618115447461605 +-0.264511093497276 -0.166913021355867 +-0.340417165309191 1.35785613581538 +1.10081712901592 1.90627477318048 +-0.360229104757309 1.34178064391017 +1.16831948980689 -0.83953519910574 +0.752963546663523 -0.757756028324366 +-1.10175457596779 1.14992834255099 +0.830139309167862 -0.344914380460978 +-0.408921517431736 -1.04360637441278 +1.24010152742267 -0.42424376308918 +0.148919489234686 -1.16157643869519 +-0.372517861425877 -1.72158771008253 +1.7070114351809 0.911890551447868 +0.406485542654991 -1.19170558825135 +1.91867784410715 -0.404295593500137 +-1.89149553701282 0.983305435627699 +-0.358908861875534 -1.21336421743035 +1.21668677404523 -0.3226686604321 +2.51544455811381 
2.31026931479573 +-1.4080295599997 0.422599449753761 +1.20837393403053 0.145167369395494 +1.34825099259615 1.15926613286138 +1.16655233129859 2.23332107439637 +-0.351753123104572 -1.41671456769109 +1.66718448325992 1.19637889042497 +1.22080628946424 -0.477553192526102 +0.443488195538521 -0.497285302728415 +0.84282948449254 0.14185868576169 +0.386103212833405 -0.634570933878422 +1.46037838608027 0.26014057919383 +-1.39079090580344 2.16905022412539 +2.40182330831885 1.7037850767374 +0.484121810644865 1.55106157809496 +-1.71091304346919 2.72542923688889 +0.161875270307064 1.27137557044625 +2.12500772252679 1.43173980340362 +0.240108534693718 -1.43467833474278 +0.769589018076658 0.670322801917791 +0.244401704519987 -0.271251730620861 +1.1635411567986 -0.237991239875555 +0.803581487387419 -0.692762237042189 +-2.28540902584791 1.90644216910005 +-2.39361248537898 1.61623910069466 +0.859886080026627 0.805359594523907 +-0.818678826093674 -1.29967925325036 +2.26783499494195 1.55795821547508 +-1.34667026996613 2.15096535161138 +1.2860598526895 -1.11867181956768 +-1.54761004075408 0.481441363692284 +2.02345432713628 2.30243618786335 +0.189203031361103 -1.18375464901328 +2.38989270478487 2.09131558984518 +1.27135165408254 -0.827909290790558 +0.476273659616709 0.43297129124403 +0.549993649125099 -0.712449744343758 +-0.590019319206476 0.609020862728357 +-1.37739800661802 0.822743955999613 +-0.514302976429462 -0.108744259923697 +0.140620991587639 0.528586748987436 +1.11176813021302 1.01990537717938 +-1.70119750127196 1.9610542729497 +-0.693112593144178 0.922962408512831 +-0.968886598944664 0.0183605402708054 +-0.40006460621953 1.51226245239377 +0.692324854433537 -0.959333036094904 +1.70040862262249 -0.716264363378286 +0.112045742571354 0.0550109781324863 +1.44786951690912 0.105628997087479 +-1.97281047329307 1.57007339596748 +0.854845203459263 -0.357608176767826 +2.4958129376173 2.35387401655316 +-0.944571565836668 2.0687990039587 +2.08508951589465 1.02542628720403 +-2.19984470680356 2.28132992610335 +-0.818507105112076 1.63400863483548 +-2.582839012146 1.87758314609528 +-0.911653608083725 0.937695942819118 +0.989647872745991 -0.360147427767515 +-1.2352470010519 1.12102124840021 +-1.46631982922554 2.64640394598246 +1.39416750892997 2.0045725889504 +-2.3352920897305 1.71312268078327 +-1.1889609284699 -1.01699908450246 +1.61888091266155 2.22593560069799 +-0.192873489111662 1.61981715634465 +1.4073628410697 -0.958952866494656 +0.189776904881001 -1.12140179052949 +0.352808032184839 -1.31883667781949 +-1.43774710223079 -0.111649796366692 +1.54545639455318 -0.214231602847576 +-1.21707468107343 0.239244759082794 +-0.431962318718433 0.63491116091609 +-2.49045175686479 2.00828633457422 +-1.25643348693848 1.51512194424868 +2.43773312866688 1.06484807282686 +-1.91906216368079 0.403031967580318 +-1.69426439329982 -0.0742638781666756 +-1.85025086253881 0.144867029041052 +-0.911288324743509 1.96707667410374 +-1.43946547806263 0.13005780801177 +-1.76467442512512 -0.255941100418568 +-0.754928324371576 0.96613272279501 +0.15705419331789 -0.351710144430399 +0.238132137805223 1.18852050602436 +-1.24621857702732 2.06510658562183 +1.94084545224905 1.1559029109776 +-1.69807329028845 2.36016522347927 +1.84611836075783 -0.422526262700558 +0.732420492917299 -0.0912571959197521 +0.601432427763939 1.49548490345478 +1.80730123072863 2.44454520568252 +1.7521532215178 0.734295856207609 +-1.75109869614244 2.01851295307279 +-1.46045888587832 -1.18062547966838 +-2.11592252925038 1.15961290895939 +-0.0866617895662785 1.32037572190166 
+-0.915400538593531 -1.57618939876556 +0.227391455322504 -0.700619127601385 +2.21257435902953 0.997094083577394 +-1.62462072446942 2.57901826873422 +-2.73094729706645 1.97695105522871 +0.12321599945426 -0.0246176086366177 +2.33971294760704 1.40405578538775 +-0.706792812794447 -0.837295100092888 +-1.00911624357104 0.838658902794123 +-1.94533491134644 -0.0701416581869125 +-1.15551827847958 -1.38875709846616 +-0.934895634651184 1.04327037930489 +-0.211871236562729 -1.09823168441653 +0.169712942093611 -0.0446674339473248 +1.06625872477889 1.97118170186877 +-1.25804281234741 0.886788982897997 +-2.61787576600909 1.32074421644211 +1.36078009009361 2.01001681759953 +0.332552999258041 1.29355490207672 +-2.52466997131705 1.41966654732823 +0.293810479342937 1.44154932722449 +-0.513594482094049 -1.43964662402868 +1.0289658755064 0.668009996414185 +-0.727017242461443 -0.596485421061516 +-1.82586620375514 2.35403306037188 +-1.48376322537661 -0.55543253198266 +-1.94583880156279 1.03091420978308 +1.85508842766285 2.48763942718506 +-0.353873550891876 1.8073241263628 +0.338837869465351 1.19262959808111 +0.00111318379640579 -0.928653158247471 +-2.22954984381795 1.6353726349771 +2.32841690629721 1.02519375830889 +-1.497622910887 2.11524688452482 +-1.13152888044715 2.32170767709613 +1.90996831655502 0.276282824575901 +0.00990390777587891 1.00517755746841 +0.701457440853119 -0.304533410817385 +1.27289244160056 2.24463789910078 +0.273747391998768 -1.36919137090445 +-1.99362321943045 1.75929698348045 +-0.463237706571817 -0.969978157430887 +1.42394747957587 1.11331262812018 +1.76678412035108 0.654731053858995 +-0.841969907283783 -0.67922318354249 +-2.05711057409644 0.886829487979412 +0.860604699701071 1.35574603453279 +1.80854593589902 1.26956680044532 +1.20694830268621 0.793823100626469 +0.907100852578878 -1.77990494295955 +-2.34964608773589 2.43125440180302 +-1.30557874962687 -0.0671508871018887 +-0.976151529699564 0.520547077059746 +-2.11239970102906 2.15287859737873 +1.8851876296103 1.38906580209732 +-2.27535513043404 2.55938155949116 +0.00746240094304085 -1.9401679225266 +-0.847445383667946 2.0115709528327 +-0.443183153867722 0.241431519389153 +-0.1369041018188 -1.43644507974386 +-1.6810755841434 0.153430592268705 +0.259402230381966 0.225520148873329 +0.896363895386457 -0.121488932520151 +1.75927463546395 0.214562363922596 +-1.2606627009809 0.65103916823864 +-1.26344175264239 2.55622681230307 +0.189495876431465 0.440596155822277 +2.47990715503693 2.05375295132399 +1.27944708615541 1.61408348381519 +-0.133404325693846 -1.60597418621182 +1.04190653935075 -1.06877098977566 +-0.818794757127762 -0.225817736238241 +1.49698880314827 1.16084482520819 +0.618656251579523 1.77822134643793 +-1.95838677510619 1.42047830671072 +1.55314322933555 1.89102985337377 +0.409736018627882 0.446480236947536 +-0.926668312400579 0.938058037310839 +1.69760521873832 2.00045891106129 +0.0496425293385983 1.87705323472619 +-1.08922987431288 -0.0579799301922321 +1.40526758506894 -0.347946129739285 +-0.663692086935043 0.9941131696105 +-1.20046944916248 0.322345469146967 +0.148903734982014 0.90363060683012 +-1.13516391813755 1.51653366908431 +-2.35571358725429 2.14477759599686 +2.11366003751755 1.89914147183299 +-1.12080305069685 1.68686212599277 +-1.44494257867336 2.5803909227252 +-0.210754711180925 1.48191699385643 +0.178078010678291 -1.2725990191102 +-0.79769379645586 -0.174297284334898 +1.89813053607941 1.18378107622266 +0.32603557407856 -1.82324499636889 +-0.954569336026907 -0.447752676904202 +2.51113520562649 1.14787375554442 
+-0.424628272652626 -1.68265929445624 +-2.17934715375304 1.63298554718494 +0.571811735630035 0.444216702133417 +-0.89815591648221 1.21311965212226 +1.73257183283567 2.45075498893857 +1.55595071613789 1.79428355395794 +1.9856330640614 1.1643291823566 +2.34012077748775 1.82905661687255 +-0.993985898792744 1.20986743271351 +-1.48339118435979 2.12659515440464 +2.27551550045609 1.29373554885387 +-1.70822201296687 -0.318286787718534 +2.35754160583019 1.58456902205944 +2.13829482719302 2.29005823656917 +0.253564484417439 -1.18492783606052 +1.57069293782115 -1.01018716022372 +0.404335621744394 -0.0650912597775459 +-1.92300165072083 1.27411010116339 +1.57898884639144 0.703308567404747 +0.845139045268297 -0.418311722576618 +1.2988861836493 0.587784543633461 +0.518400866538286 -1.58826111257076 +-1.13196152821183 0.516853500157595 +1.1149092130363 -1.18224715813994 +1.6822575032711 -0.70547254011035 +1.05904187262058 2.12071667984128 +2.11530291289091 2.43616784363985 +0.58172482997179 -0.34589896351099 +0.589927405118942 -0.157101996243 +0.765804897993803 1.41523890197277 +1.74744364619255 1.74969131499529 +-1.87116086110473 0.185410618782043 +-0.1977972202003 0.400169193744659 +-0.990963570773602 -0.647068481892347 +-1.38111573457718 2.33138216286898 +-0.813261747360229 -0.918440371751785 +-0.832913897931576 0.525791399180889 +0.205339454114437 -0.950149931013584 +-1.22695801407099 0.465654268860817 +-0.354969248175621 0.496655818074942 +2.43421614915133 2.20943011716008 +-0.304907817393541 -0.955091889947653 +2.00556206703186 1.12544449418783 +1.92796642705798 1.13527294993401 +-1.72183781862259 1.6249023526907 +0.401384476572275 0.135925099253654 +1.51852075755596 1.45859986171126 +0.325174354016781 0.111687082797289 +-0.539653137326241 -0.497575681656599 +0.331989172846079 1.82191975414753 +-1.4252701587975 2.16774620115757 +-0.144425354897976 -0.466782595962286 +-1.13555488362908 -0.556928493082523 +-2.34426283463836 1.63735318183899 +-1.29355070367455 0.300822611898184 +-0.331732597202063 -0.165921099483967 +-0.187553338706493 -0.0724310129880905 +1.96293422579765 2.69726276397705 +-1.21067130938172 1.52139539644122 +1.59623555094004 2.62135153636336 +-0.944612737745047 1.65927823632956 +0.809443186968565 -0.0622059032320976 +-0.127949818968773 0.879051592200994 +0.844926618039608 -0.51328931003809 +-0.287330526858568 -1.06635941937566 +1.94853788241744 1.58533385768533 +-1.84340372309089 -0.193597558885813 +0.697454612702131 0.601311907172203 +-2.53215774893761 1.67085422202945 +1.48770397901535 0.765713587403297 +1.52033101022243 1.15315987169743 +-1.78449742868543 2.35797537118196 +2.31377111375332 1.41708736494184 +1.35620972141623 2.26367348432541 +-1.65130112692714 2.06000670790672 +0.396589934825897 0.44348818808794 +0.324352670460939 -0.158905539661646 +-0.361423842608929 -1.24650709331036 +0.433208342641592 -0.279201403260231 +-0.43550531566143 1.68990592285991 +-1.60780624672771 1.11082122474909 +1.73133760690689 1.79797317832708 +2.43849250674248 1.38229438662529 +-0.299469012767076 -1.74471325799823 +1.08706213161349 -0.293303485959768 +-0.291773624718189 1.46568392962217 +0.803336292505264 -0.607428457587957 +1.03229028731585 -1.69978650659323 +-0.479393821209669 -0.446806456893682 +-0.834967110306025 1.63190622627735 +0.636984921991825 1.73768143355846 +-1.06361171230674 -0.143274985253811 +-1.25952018797398 2.25864502415061 +0.809904493391514 -0.498910147696733 +-1.43502854928374 -0.510334730148315 +-1.6554843261838 0.248946350067854 +-0.589875150471926 1.75820660218596 
+2.23427580669522 0.896198321133852 +-0.119215946644545 0.0970341823995113 +1.56668760254979 0.992664005607367 +-2.50893969461322 2.14301791042089 +1.15657492727041 -0.421223007142544 +-0.78032822906971 1.85029668360949 +-0.562441989779472 0.401790745556355 +-0.47108544036746 -1.3189491443336 +-1.37896786630154 0.832404494285583 +-0.61160484701395 0.0440536737442017 +-1.54219305887818 0.672993030399084 +2.59466862678528 1.27498580887914 +-1.58820946514606 0.764698434621096 +1.30962435528636 -0.356178529560566 +1.84168921783566 1.24110063537955 +1.29256866872311 2.60901599749923 +1.24603347852826 2.41129258647561 +-2.64501053839922 1.86759171262383 +-1.35711774602532 2.55455750972033 +0.746162887662649 -1.61713218688965 +0.0798916891217232 0.601995587348938 +-2.43406558036804 1.74106682837009 +-0.879188474267721 2.2131115719676 +-0.560586262494326 -1.3882253728807 +2.17716415598989 2.50622313097119 +-1.84439251571894 2.69619301706553 +2.27160254120827 1.68926881998777 +-0.332596436142921 0.415987715125084 +-1.44145343452692 1.98718224093318 +0.612818073481321 1.58679561316967 +1.53858286514878 1.75287451967597 +-1.33638124167919 -0.861885357648134 +-1.08076728135347 -0.988055072724819 +1.83729149028659 0.204145684838295 +-0.623347148299217 0.475584805011749 +-0.991771582514048 -1.09158610552549 +2.30239060148597 1.99936077743769 +1.71475706622005 1.06199776381254 +-1.3150331415236 0.590358354151249 +2.60970497876406 1.63954050466418 +-1.27026998251677 0.86134621873498 +-0.974529698491096 0.214717838913202 +1.45006487518549 2.40938724949956 +1.2625614926219 -1.24214157834649 +-0.170943569391966 0.962725646793842 +1.56912638619542 1.28197644278407 +0.860690008848906 0.385002806782722 +-0.106345932930708 0.342781770974398 +0.943748936057091 -1.44625946506858 +1.68963718414307 2.1058597266674 +0.694411162286997 -1.56854231655598 +-0.693399425595999 -0.311491683125496 +-0.107904348522425 -0.300053924322128 +1.1605437323451 0.539197582751513 +-0.383863247931004 -1.81783757358789 +0.0211889781057835 1.51923562586308 +-1.48047061264515 1.99572211503983 +-1.8963265940547 0.221708815544844 +-1.20221422612667 0.359080608934164 +2.6162308268249 2.08723966032267 +-1.86858968809247 0.66895055025816 +-0.962799292057753 0.0859144069254398 +-0.465460162609816 -0.931790269911289 +-1.60933033004403 2.4452146589756 +1.17736082151532 -1.45229281485081 +-1.64149008318782 -0.0998606495559216 +1.7752399072051 -0.71979358792305 +-0.783054545521736 -1.40251789987087 +0.234654936939478 -1.58600978180766 +0.58033087849617 -0.748550366610289 +-1.90480010583997 1.56681554019451 +-1.03799159824848 1.95584250241518 +1.08115602657199 -0.468075897544622 +2.00295348465443 2.21343529969454 +-1.13696031272411 -0.317955814301968 +0.282958164811134 -1.58415558934212 +0.420478258281946 -0.813194897025824 +0.102886781096458 -0.779950436204672 +1.36936252191663 0.225148435682058 +-1.34729197248816 1.01942044496536 +1.53009315952659 0.909103371202946 +-0.658396322280169 -0.753399543464184 +1.79261982440948 -0.262618996202946 +0.583348255604506 0.0147824510931969 +-0.522474568337202 -1.4126725718379 +-0.906382046639919 0.58741732686758 +-0.602347191423178 0.119922380894423 +2.26874242722988 2.60309627652168 +1.59241349622607 0.474422782659531 +0.881649512797594 2.03407746553421 +-1.62226830422878 2.11579298600554 +0.994501914829016 0.668223965913057 +-1.15842272341251 2.31485716626048 +0.211986571550369 -0.0734643414616585 +-0.624419450759888 0.483107920736074 +-1.78700317069888 0.108719736337662 +-2.08779327198863 2.63648213818669 
+1.22670106962323 -1.55703055113554 +0.670108482241631 0.91326666995883 +1.61010926216841 1.95010170713067 +0.620652854442596 -0.882291678339243 +-1.1628533564508 0.461979772895575 +2.2266492433846 1.31345502287149 +-1.62142623215914 -1.11081631109118 +-0.617651168256998 0.0647894628345966 +-0.811671018600464 1.2208102978766 +0.237833980470896 -0.753255221992731 +0.97948008403182 -0.0338917039334774 +0.156821418553591 -0.666067086160183 +-1.342354696244 1.6389994174242 +-1.54323527961969 2.25725488364697 +-1.05316337570548 2.20065246894956 +0.657830975949764 -0.648187037557364 +1.38613677769899 0.58720763400197 +-0.782260403037071 -0.26339952647686 +0.44032796099782 1.81574277207255 +1.46070072054863 1.43597199767828 +-0.716538436710835 0.371804386377335 +-1.09276594594121 -0.275377713143826 +-1.49627240374684 0.471240390092134 +0.57981800287962 1.815324857831 +2.4621400795877 1.97493487969041 +-2.66918778046966 1.79396991804242 +-0.298991650342941 -0.578591756522655 +0.988132733851671 -0.104100853204727 +-0.327946946024895 -0.752921551465988 +1.38685739040375 0.117118045687675 +-0.64865717664361 -0.30232522636652 +-0.546898003667593 1.5873331092298 +-1.49897158890963 -0.913757015019655 +-1.09021601453424 0.848263591527939 +-0.320549670606852 -0.239252358675003 +0.0904785208404064 1.23896200954914 +-1.77737388759851 1.89998495206237 +-1.93136211112142 1.65279033407569 +0.631339211016893 -1.82964565232396 +-1.91232411190867 2.11860547587276 +-1.19330284371972 2.30624523386359 +-2.53579203039408 1.5478145442903 +-0.192411940544844 -1.45809458568692 +-1.85302856937051 0.752138309180737 +1.89287273958325 2.74008622765541 +-1.04250741750002 0.193212371319532 +1.27562734112144 -0.418145980685949 +1.77442010119557 -0.903015896677971 +0.46977075189352 -0.43333537876606 +2.11808300018311 2.43337818235159 +0.688495025038719 -1.03752862289548 +-2.69392375275493 1.92976699769497 +-0.837532605975866 -0.198896203190088 +0.653783667832613 -1.78212733194232 +-0.0959374494850636 1.32818652316928 +-0.0644411258399487 0.981948718428612 +0.557495482265949 -0.0173930414021015 +1.02625918760896 -1.31626728922129 +-1.13721644505858 -0.63504109159112 +-0.458112303167582 1.12636802345514 +-0.983000241219997 1.42918458208442 +-0.837255921214819 -1.59328313916922 +2.29300066456199 1.37496708333492 +-0.703569184988737 0.283491875976324 +-1.75841117277741 1.30724526569247 +2.32311517745256 1.34328655526042 +0.775205828249454 0.532462984323502 +-0.375845909118652 1.06066490337253 +-1.21567707881331 2.46823626384139 +0.336818538606167 0.128541518002748 +1.51837332546711 1.14277412742376 +-1.59441809728742 -0.989736590534449 +-1.51170239597559 -0.887233525514603 +-1.81192544475198 2.14165158569813 +-1.38947232067585 1.62125171348453 +1.2475764490664 -0.864988032728434 +-1.9311491176486 0.101769026368856 +-0.358296927064657 -0.634993430227041 +0.0972351841628551 0.0385658219456673 +0.56409315392375 -0.52484679967165 +-0.654433283954859 1.15817462280393 +-2.2833559513092 1.8690365254879 +-2.6278076171875 2.01589938625693 +-0.153433874249458 1.08611988648772 +0.029228188097477 0.404660366475582 +0.302856381982565 -0.314573220908642 +0.587397113442421 -0.543126881122589 +1.54116045311093 -1.04606546834111 +0.958322558552027 -0.527815390378237 +1.29699696600437 1.25186222791672 +0.297071639448404 0.727243777364492 +-0.828453466296196 -0.120953697711229 +0.675423596054316 0.195587668567896 +0.324492201209068 -1.32724549993873 +2.6195154003799 2.28915411233902 +2.19447880983353 2.60289345681667 +-1.28758288547397 
1.22408183664083 +-0.383953996002674 -1.94530162215233 +1.30764616653323 -1.17165103182197 +-1.98722835630178 2.71314226090908 +0.0999722480773926 0.586431857198477 +0.519043359905481 -1.14340651035309 +-2.03344832360744 0.879507496953011 +-1.17805351689458 0.811898585408926 +-0.014006182551384 -0.855070531368256 +1.05536275729537 -0.0407215654850006 +1.2919496037066 2.29178055748343 +-0.0483092553913593 0.315016660839319 +0.377253893762827 0.551010023802519 +2.11492885276675 0.896510727703571 +-2.06066506356001 2.57832121104002 +0.439418245106936 -1.03042285144329 +1.6612491235137 0.279604911804199 +0.148981612175703 -0.817155547440052 +0.581491384655237 -0.397189322859049 +-0.906728111207485 0.995599661022425 +2.07323933392763 1.72313221171498 +1.86393178999424 1.46100195124745 +0.203920360654593 1.44141630828381 +1.24777040630579 1.45024171471596 +-2.12518339604139 2.13095486536622 +1.52807463333011 1.53370461240411 +-1.00800594687462 -0.496832311153412 +-0.189706176519394 0.442924000322819 +1.69896554201841 0.645317282527685 +1.26732464879751 1.67109887301922 +0.514938496053219 -0.234460636973381 +-0.911988072097301 -0.988704368472099 +0.738490886986256 0.645062066614628 +1.72206087037921 2.08895723894238 +-0.292346362024546 1.30616158619523 +0.720038671046495 0.170416902750731 +-2.36888703331351 1.26750460267067 +-1.73304146900773 -0.894560258835554 +1.23942871019244 0.663122121244669 +-0.121451988816261 -0.974075652658939 +-1.8362438865006 0.740432601422071 +-0.594984237104654 -0.638696365058422 +0.88490729033947 1.28329009190202 diff --git a/inst/dev/datasets/cec/mouse_2_spherical/iteration.txt b/inst/dev/datasets/cec/mouse_2_spherical/iteration.txt new file mode 100644 index 00000000..7813681f --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/iteration.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/inst/dev/datasets/cec/mouse_2_spherical/type.txt b/inst/dev/datasets/cec/mouse_2_spherical/type.txt new file mode 100644 index 00000000..4e76fb85 --- /dev/null +++ b/inst/dev/datasets/cec/mouse_2_spherical/type.txt @@ -0,0 +1 @@ +spherical \ No newline at end of file diff --git a/inst/dev/datasets/cec/simple_1/cluster.txt b/inst/dev/datasets/cec/simple_1/cluster.txt new file mode 100644 index 00000000..d3d17122 --- /dev/null +++ b/inst/dev/datasets/cec/simple_1/cluster.txt @@ -0,0 +1,10 @@ +1 +1 +1 +1 +1 +1 +1 +1 +1 +1 diff --git a/inst/dev/datasets/cec/simple_1/dimension.txt b/inst/dev/datasets/cec/simple_1/dimension.txt new file mode 100644 index 00000000..0cfbf088 --- /dev/null +++ b/inst/dev/datasets/cec/simple_1/dimension.txt @@ -0,0 +1 @@ +2 diff --git a/inst/dev/datasets/cec/simple_1/energy.txt b/inst/dev/datasets/cec/simple_1/energy.txt new file mode 100644 index 00000000..eb888a48 --- /dev/null +++ b/inst/dev/datasets/cec/simple_1/energy.txt @@ -0,0 +1 @@ +1.925288 diff --git a/inst/dev/datasets/cec/simple_1/input.txt b/inst/dev/datasets/cec/simple_1/input.txt new file mode 100644 index 00000000..48134b2f --- /dev/null +++ b/inst/dev/datasets/cec/simple_1/input.txt @@ -0,0 +1,10 @@ +1.43048228779078 -0.485696801708548 +0.770226465127688 -0.735300252961767 +0.491075440350476 -0.230390205103122 +-0.499973004580613 -1.15655607693172 +-0.527030825382884 0.0953146045467666 +-0.549843857976897 -0.562465071653098 +0.168450524923163 0.00624731717159897 +-0.308904169794657 0.205551292498891 +-0.917448785698035 -1.73084883006679 +0.726916323509522 0.101879022415894 diff --git a/inst/dev/datasets/svm/2e.data b/inst/dev/datasets/svm/2e.data new file mode 100644 
index 00000000..e0142c48 --- /dev/null +++ b/inst/dev/datasets/svm/2e.data @@ -0,0 +1,192 @@ + -1.1730 -3.0962 -1.0000 + 0.6040 6.7376 1.0000 + -0.9481 -4.2203 -1.0000 + -1.3731 -8.6549 -1.0000 + 1.2534 4.8564 1.0000 + 1.1610 6.0382 1.0000 + -0.9106 -8.0659 -1.0000 + -0.9090 -7.4314 -1.0000 + -0.9714 -7.2942 -1.0000 + -0.5359 -9.3491 -1.0000 + -1.1380 -4.8674 -1.0000 + -1.3416 -3.6281 -1.0000 + 0.9062 7.9299 1.0000 + -0.6061 -6.7841 -1.0000 + 1.4233 1.6989 1.0000 + 0.7836 7.5562 1.0000 + 1.1316 5.6921 1.0000 + -0.7075 -5.3228 -1.0000 + -1.2532 -4.2060 -1.0000 + -0.7044 -9.1107 -1.0000 + 0.9048 5.8761 1.0000 + 1.3821 9.7896 1.0000 + -0.9814 -2.2100 -1.0000 + -1.7107 -5.6126 -1.0000 + 1.2471 6.4349 1.0000 + 1.5973 8.5007 1.0000 + 0.7778 6.1033 1.0000 + -0.7947 -5.4596 -1.0000 + 0.9924 7.5340 1.0000 + 0.6303 4.9435 1.0000 + 0.5451 5.4141 1.0000 + 1.3294 2.3551 1.0000 + -0.5806 -9.0351 -1.0000 + 0.2807 6.8794 1.0000 + 0.6068 7.4868 1.0000 + 1.0394 7.1780 1.0000 + 0.8805 7.7064 1.0000 + 1.8401 6.6211 1.0000 + -0.9950 -4.1613 -1.0000 + 1.1445 5.0546 1.0000 + 1.0078 6.6034 1.0000 + -0.5314 -5.3435 -1.0000 + 0.9618 7.6036 1.0000 + -1.1695 -8.0013 -1.0000 + -1.2189 -8.1936 -1.0000 + 0.9457 6.2753 1.0000 + -1.2906 -4.9616 -1.0000 + -0.8462 -7.7063 -1.0000 + -0.3100 -8.5060 -1.0000 + -1.3621 -5.2055 -1.0000 + -0.8222 -5.9377 -1.0000 + 1.1093 5.1549 1.0000 + 0.6519 5.7499 1.0000 + -1.4732 -6.2134 -1.0000 + 0.9087 7.8792 1.0000 + -0.9370 -8.3422 -1.0000 + -1.1819 -6.0846 -1.0000 + 0.8221 6.1357 1.0000 + 0.6983 5.7864 1.0000 + 0.6770 3.4977 1.0000 + -0.8501 -6.6419 -1.0000 + -1.0766 -7.7026 -1.0000 + 0.9672 3.8866 1.0000 + -0.6831 -5.0365 -1.0000 + 1.1488 5.8322 1.0000 + 1.4038 3.6311 1.0000 + -0.4126 -7.4765 -1.0000 + -1.0588 -1.2139 -1.0000 + -1.1542 -5.6220 -1.0000 + -0.8697 -2.7250 -1.0000 + 0.7669 2.8722 1.0000 + -1.5777 -7.0726 -1.0000 + 1.0734 2.9552 1.0000 + 1.1535 3.3239 1.0000 + 1.6523 8.0020 1.0000 + 1.0118 6.0715 1.0000 + 0.8536 9.2626 1.0000 + 0.6062 7.5859 1.0000 + -1.1425 -7.1624 -1.0000 + -0.8970 -3.0160 -1.0000 + -1.2742 -3.6507 -1.0000 + 1.4421 3.5117 1.0000 + 0.9729 6.4232 1.0000 + 0.4776 5.8178 1.0000 + -1.2328 -6.5321 -1.0000 + -0.7149 -3.9596 -1.0000 + -1.1439 -9.6954 -1.0000 + -0.8993 -5.9184 -1.0000 + -1.3016 -4.6589 -1.0000 + -0.8219 -6.0360 -1.0000 + 1.3952 8.0279 1.0000 + -0.9891 -6.5681 -1.0000 + -0.9076 -8.9369 -1.0000 + 0.7231 5.6612 1.0000 + 1.1108 3.7765 1.0000 + -0.9124 -6.9583 -1.0000 + 0.8919 7.5810 1.0000 + 1.0102 2.4124 1.0000 + 0.7386 7.5895 1.0000 + 0.6509 4.3132 1.0000 + -1.1488 -3.2352 -1.0000 + 0.7702 7.3784 1.0000 + 0.5491 7.9651 1.0000 + 1.4829 4.8157 1.0000 + -1.1456 -5.5741 -1.0000 + 0.8614 6.5267 1.0000 + -1.0771 -6.1724 -1.0000 + 0.6693 6.3435 1.0000 + 1.4911 6.8902 1.0000 + -0.9668 -7.4749 -1.0000 + -1.1747 -4.9214 -1.0000 + -0.9509 -2.8987 -1.0000 + -0.3042 -5.7825 -1.0000 + 0.7429 8.1234 1.0000 + 1.5640 5.5170 1.0000 + 1.0359 7.4943 1.0000 + -1.5998 -3.1427 -1.0000 + 0.3503 8.0210 1.0000 + -1.0331 -8.7622 -1.0000 + -0.8382 -8.1100 -1.0000 + -0.7897 -3.8324 -1.0000 + 0.6705 5.9828 1.0000 + -0.7500 -5.5699 -1.0000 + -0.9340 -6.8582 -1.0000 + 0.5638 2.9511 1.0000 + 1.0872 7.7093 1.0000 + 1.0775 7.2635 1.0000 + -0.8477 -6.3612 -1.0000 + 0.5917 3.2380 1.0000 + -0.8088 -7.6834 -1.0000 + 0.5997 6.5189 1.0000 + 0.9911 7.5803 1.0000 + -0.8970 -8.1580 -1.0000 + 1.6537 3.7877 1.0000 + 0.4511 2.8487 1.0000 + -1.0055 -5.2500 -1.0000 + 0.5245 6.1955 1.0000 + 0.8457 4.8373 1.0000 + -1.1115 -2.1850 -1.0000 + -0.9239 -5.7020 -1.0000 + -1.2866 -5.9105 -1.0000 + 0.9626 
4.1683 1.0000 + 1.0191 2.9771 1.0000 + 0.7494 6.5943 1.0000 + -0.6749 -9.3978 -1.0000 + 0.4826 7.2081 1.0000 + -1.0289 -7.3142 -1.0000 + -1.4208 -4.5415 -1.0000 + -1.5170 -5.7623 -1.0000 + -1.2583 -5.9918 -1.0000 + -1.3472 -6.4615 -1.0000 + -1.0157 -5.1580 -1.0000 + 0.7972 4.6537 1.0000 + -1.1647 -4.9066 -1.0000 + 0.5729 5.2701 1.0000 + 0.6975 8.2080 1.0000 + 0.5537 13.6067 1.0000 + 1.3317 8.2842 1.0000 + -0.7688 -8.5095 -1.0000 + -1.0430 -3.5328 -1.0000 + -1.3721 -8.5944 -1.0000 + 0.7565 5.5245 1.0000 + 0.9550 6.4451 1.0000 + 0.9149 5.6273 1.0000 + -0.2610 -7.7332 -1.0000 + 1.0853 5.2627 1.0000 + 1.1544 6.2143 1.0000 + -1.6540 -5.6926 -1.0000 + 0.8119 7.4685 1.0000 + -1.0052 -7.4820 -1.0000 + -1.0590 -9.6593 -1.0000 + -0.5250 -9.0984 -1.0000 + -1.2239 -2.4065 -1.0000 + -0.4775 -7.2775 -1.0000 + -0.7097 -4.7670 -1.0000 + 0.7662 3.4879 1.0000 + -1.1560 -11.5698 -1.0000 + -0.6896 -5.1652 -1.0000 + -0.8193 -7.8006 -1.0000 + -0.7856 -10.0448 -1.0000 + -1.0625 -6.6710 -1.0000 + 0.6289 4.6836 1.0000 + 1.4814 4.0170 1.0000 + -1.0966 -3.6550 -1.0000 + -0.6809 -4.0949 -1.0000 + 0.9560 6.2732 1.0000 + 1.3950 8.1016 1.0000 + -1.1255 -9.3660 -1.0000 + -0.8031 -9.6933 -1.0000 + -1.5363 -7.1357 -1.0000 + -0.7202 -10.6595 -1.0000 + 0.9312 3.7417 1.0000 diff --git a/inst/dev/datasets/svm/breast_cancer.data b/inst/dev/datasets/svm/breast_cancer.data new file mode 100644 index 00000000..8acaf949 --- /dev/null +++ b/inst/dev/datasets/svm/breast_cancer.data @@ -0,0 +1,683 @@ +2 1:-0.860107 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.859671 2:-0.111111 3:-0.333333 4:-0.333333 5:-0.111111 6:0.333333 7:1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.857807 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-1 10:-1 +2 1:-0.85768 2:0.111111 3:0.555556 4:0.555556 5:-1 6:-0.555556 7:-0.333333 8:-0.555556 9:0.333333 10:-1 +2 1:-0.857569 2:-0.333333 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.857554 2:0.555556 3:1 4:1 5:0.555556 6:0.333333 7:1 8:0.777778 9:0.333333 10:-1 +2 1:-0.857408 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:1 8:-0.555556 9:-1 10:-1 +2 1:-0.857339 2:-0.777778 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.855171 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-0.111111 +2 1:-0.855171 2:-0.333333 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.854841 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.854709 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.853868 2:-0.111111 3:-0.555556 4:-0.555556 5:-0.555556 6:-0.777778 7:-0.555556 8:-0.333333 9:-0.333333 10:-1 +2 1:-0.85354 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.555556 8:-0.555556 9:-1 10:-1 +4 1:-0.853454 2:0.555556 3:0.333333 4:-0.111111 5:1 6:0.333333 7:0.777778 8:-0.111111 9:-0.111111 10:-0.333333 +4 1:-0.852997 2:0.333333 3:-0.333333 4:0.111111 5:-0.333333 6:0.111111 7:-1 8:-0.333333 9:-0.555556 10:-1 +2 1:-0.852842 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.852671 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.852543 2:1 3:0.333333 4:0.333333 5:0.111111 6:-0.333333 7:1 8:-0.333333 9:-1 10:-0.777778 +2 1:-0.852536 2:0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.851958 2:0.333333 3:-0.555556 4:-0.777778 5:1 6:-0.111111 7:1 8:-0.111111 9:-0.333333 10:-0.333333 +4 1:-0.851957 2:1 3:-0.111111 4:-0.111111 5:-0.555556 6:0.111111 7:0.333333 8:0.333333 9:1 10:-1 +2 1:-0.85163 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 
8:-0.777778 9:-1 10:-1 +2 1:-0.851217 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.850295 2:-0.111111 3:-0.777778 4:-0.555556 5:-0.333333 6:-0.777778 7:0.333333 8:-0.555556 9:0.111111 10:-1 +2 1:-0.850198 2:-0.555556 3:-0.777778 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.850107 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.850038 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.849517 2:-1 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.849517 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.849393 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.849331 2:1 3:0.333333 4:0.333333 5:-0.555556 6:0.555556 7:-0.111111 8:0.333333 9:-0.333333 10:-0.555556 +2 1:-0.848968 2:-0.777778 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.848891 2:-0.555556 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.848267 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.848135 2:1 3:1 4:1 5:0.555556 6:0.111111 7:-1 8:0.555556 9:0.777778 10:-1 +2 1:-0.847895 2:0.111111 3:-0.777778 4:-1 5:-1 6:-1 7:-1 8:0.333333 9:-1 10:-1 +4 1:-0.847478 2:-0.111111 3:-0.333333 4:-0.333333 5:0.777778 6:-0.777778 7:1 8:-0.111111 9:0.111111 10:-1 +4 1:-0.846481 2:-0.777778 3:-0.111111 4:-0.555556 5:-0.555556 6:0.111111 7:0.333333 8:0.333333 9:-0.111111 10:-1 +4 1:-0.845249 2:1 3:-0.333333 4:-0.555556 5:-1 6:-0.555556 7:-0.555556 8:0.111111 9:-0.111111 10:-0.777778 +4 1:-0.845097 2:0.111111 3:1 4:1 5:-0.777778 6:0.555556 7:1 8:0.333333 9:-0.555556 10:-0.555556 +4 1:-0.844791 2:-0.111111 3:0.111111 4:-0.111111 5:0.111111 6:1 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.844637 2:1 3:1 4:1 5:-0.333333 6:0.555556 7:-1 8:0.555556 9:1 10:-1 +2 1:-0.84462 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-0.777778 +4 1:-0.84439 2:-0.555556 3:0.333333 4:0.333333 5:-0.333333 6:-0.333333 7:0.777778 8:-0.333333 9:0.555556 10:-1 +2 1:-0.844351 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.844265 2:-0.333333 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.844156 2:0.333333 3:0.555556 4:0.333333 5:-0.777778 6:-0.333333 7:0.555556 8:-0.555556 9:0.555556 10:-0.777778 +4 1:-0.843926 2:0.777778 3:-0.111111 4:0.555556 5:-1 6:-0.777778 7:-0.555556 8:-0.777778 9:-1 10:-0.111111 +4 1:-0.843914 2:-0.111111 3:-0.555556 4:-0.555556 5:-0.333333 6:-0.777778 7:-0.333333 8:-0.555556 9:-0.333333 10:-1 +4 1:-0.843667 2:1 3:-0.555556 4:0.111111 5:-0.777778 6:-0.555556 7:-0.111111 8:-0.333333 9:1 10:-0.777778 +4 1:-0.843607 2:-0.111111 3:-0.111111 4:-0.111111 5:0.555556 6:1 7:0.555556 8:0.333333 9:-0.555556 10:0.333333 +4 1:-0.843604 2:1 3:-0.111111 4:-0.111111 5:0.111111 6:0.555556 7:0.555556 8:0.333333 9:-1 10:-1 +4 1:-0.843496 2:1 3:0.111111 4:0.111111 5:-0.555556 6:-0.333333 7:-0.111111 8:-0.555556 9:0.111111 10:-1 +4 1:-0.843352 2:0.555556 3:1 4:1 5:-1 6:-0.555556 7:0.111111 8:-0.555556 9:0.777778 10:-1 +4 1:-0.843228 2:0.555556 3:-0.777778 4:-0.333333 5:-1 6:-0.111111 7:-1 8:-0.111111 9:-0.333333 10:-0.333333 +4 1:-0.843162 2:-0.111111 3:-0.777778 4:-0.555556 5:-1 6:0.111111 7:1 8:-0.111111 9:-1 10:-1 +4 1:-0.843099 2:0.777778 3:-0.111111 4:-0.111111 5:-0.777778 6:-0.777778 7:-0.777778 8:-0.111111 9:-1 10:-1 +4 1:-0.842893 2:-0.111111 3:-0.555556 4:-0.111111 5:-0.111111 6:-0.555556 7:-0.555556 8:-0.333333 9:1 10:-1 +2 1:-0.842892 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.777778 9:-1 10:-1 +4 1:-0.842769 
2:0.777778 3:1 4:1 5:-1 6:1 7:0.555556 8:-0.555556 9:-0.555556 10:-1 +4 1:-0.842766 2:0.111111 3:-0.555556 4:-0.333333 5:-1 6:-0.111111 7:-0.777778 8:-0.555556 9:0.777778 10:-1 +2 1:-0.842757 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.842637 2:1 3:-0.333333 4:-0.777778 5:-1 6:-0.555556 7:-0.777778 8:-0.333333 9:-0.555556 10:1 +2 1:-0.842614 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.842481 2:-0.111111 3:-0.555556 4:-0.333333 5:-1 6:0.555556 7:1 8:-0.333333 9:0.777778 10:-1 +4 1:-0.842105 2:0.555556 3:-0.555556 4:0.555556 5:-0.555556 6:-0.333333 7:0.777778 8:0.555556 9:0.777778 10:0.555556 +2 1:-0.84193 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.841902 2:-0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.841731 2:0.111111 3:1 4:-0.777778 5:0.555556 6:1 7:-0.777778 8:0.333333 9:0.555556 10:1 +2 1:-0.841494 2:-1 3:-0.555556 4:-0.555556 5:-0.777778 6:-0.777778 7:-1 8:0.333333 9:-0.777778 10:-1 +4 1:-0.841437 2:0.777778 3:-0.333333 4:-0.111111 5:1 6:0.111111 7:1 8:-0.333333 9:0.555556 10:-1 +4 1:-0.84123 2:1 3:0.111111 4:-0.333333 5:-1 6:-0.555556 7:-0.333333 8:-0.555556 9:-0.777778 10:-0.555556 +2 1:-0.840502 2:-1 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-0.777778 8:-0.333333 9:-0.777778 10:-1 +2 1:-0.840344 2:-1 3:-1 4:-0.333333 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.840241 2:-0.111111 3:-0.555556 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.840227 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.555556 8:-0.555556 9:-1 10:-1 +2 1:-0.839778 2:-0.777778 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.839626 2:-0.777778 3:-0.777778 4:-0.777778 5:-1 6:-1 7:-1 8:0.333333 9:-1 10:-1 +2 1:-0.838607 2:-0.333333 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.838607 2:-0.111111 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.838149 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:0.333333 9:-1 10:-1 +4 1:-0.838052 2:-0.555556 3:-0.111111 4:0.333333 5:0.555556 6:0.555556 7:0.777778 8:0.333333 9:1 10:0.333333 +4 1:-0.838044 2:-0.111111 3:1 4:0.111111 5:-1 6:1 7:-0.333333 8:-0.333333 9:1 10:1 +4 1:-0.837965 2:-0.555556 3:-0.555556 4:0.111111 5:-0.333333 6:-0.111111 7:0.555556 8:-0.333333 9:-0.333333 10:-1 +4 1:-0.837876 2:-0.555556 3:0.111111 4:0.111111 5:0.111111 6:-0.111111 7:1 8:0.111111 9:0.555556 10:-0.555556 +2 1:-0.83736 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.83688 2:-0.777778 3:-1 4:-1 5:-0.777778 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.836771 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.83667 2:-0.555556 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.836553 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.836476 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.836143 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.835607 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.835423 2:-0.777778 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.83535 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.835329 2:0.777778 3:0.111111 4:0.777778 5:-0.777778 6:1 7:0.111111 8:-0.777778 9:0.777778 10:1 +4 1:-0.835224 2:0.333333 3:-0.111111 4:0.111111 5:1 6:-0.111111 7:1 8:0.333333 9:0.777778 10:-0.333333 +4 1:-0.835221 2:1 3:-0.555556 4:-0.111111 5:-1 6:1 7:-0.111111 8:-0.555556 9:1 10:-0.777778 +4 
1:-0.835103 2:-0.777778 3:-0.555556 4:-0.333333 5:-0.333333 6:-0.777778 7:-0.111111 8:-0.777778 9:-0.111111 10:-1 +2 1:-0.835099 2:-0.333333 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.834966 2:0.555556 3:-0.777778 4:-0.555556 5:-1 6:0.111111 7:-0.555556 8:0.333333 9:-1 10:-1 +4 1:-0.83491 2:1 3:1 4:1 5:1 6:1 7:-1 8:0.555556 9:0.555556 10:0.555556 +4 1:-0.834863 2:0.333333 3:-0.555556 4:-0.333333 5:-0.333333 6:-0.555556 7:-0.555556 8:-0.555556 9:-0.777778 10:0.333333 +4 1:-0.834658 2:1 3:1 4:1 5:0.555556 6:-0.777778 7:1 8:-0.333333 9:-1 10:-1 +4 1:-0.834658 2:-1 3:0.111111 4:0.555556 5:1 6:0.555556 7:1 8:-0.111111 9:0.333333 10:-1 +2 1:-0.834465 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.555556 10:-1 +4 1:-0.834465 2:0.111111 3:-0.111111 4:-0.333333 5:-0.333333 6:-0.555556 7:0.777778 8:0.333333 9:0.555556 10:-0.555556 +2 1:-0.834453 2:-1 3:-0.555556 4:-1 5:-0.777778 6:-0.777778 7:-0.777778 8:-0.111111 9:-0.555556 10:-0.777778 +4 1:-0.834445 2:0.555556 3:0.111111 4:-0.333333 5:-0.555556 6:-0.111111 7:0.777778 8:-0.555556 9:-1 10:-1 +4 1:-0.834399 2:1 3:-0.555556 4:-0.555556 5:1 6:-0.777778 7:1 8:0.333333 9:-0.555556 10:-0.555556 +4 1:-0.83424 2:1 3:1 4:1 5:-0.555556 6:1 7:0.555556 8:0.555556 9:-1 10:-1 +2 1:-0.834238 2:-0.555556 3:-0.555556 4:-0.777778 5:-1 6:-0.777778 7:-0.555556 8:-0.555556 9:-1 10:-1 +2 1:-0.834221 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-1 9:-1 10:-1 +2 1:-0.834221 2:0.555556 3:-0.555556 4:-0.555556 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-0.777778 10:-1 +4 1:-0.834197 2:-0.333333 3:-0.111111 4:-0.111111 5:1 6:-0.333333 7:1 8:0.333333 9:-0.111111 10:0.555556 +2 1:-0.834196 2:-1 3:-1 4:-1 5:-1 6:-0.333333 7:-0.555556 8:-1 9:-1 10:-1 +2 1:-0.834171 2:-0.555556 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-1 10:-1 +2 1:-0.834115 2:-1 3:-1 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.834115 2:-0.333333 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-1 10:-1 +4 1:-0.834104 2:1 3:1 4:1 5:-0.777778 6:1 7:1 8:-0.111111 9:-0.555556 10:-0.555556 +4 1:-0.834059 2:-0.111111 3:-0.555556 4:-0.111111 5:-1 6:0.555556 7:1 8:-0.111111 9:-0.555556 10:-1 +4 1:-0.833834 2:-0.111111 3:-0.333333 4:0.111111 5:0.333333 6:0.777778 7:0.333333 8:0.555556 9:1 10:-1 +2 1:-0.833764 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.833693 2:0.333333 3:-0.111111 4:-0.555556 5:0.333333 6:-0.333333 7:1 8:0.333333 9:-0.111111 10:-0.111111 +2 1:-0.833671 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.833616 2:0.555556 3:-0.555556 4:-0.111111 5:-0.333333 6:-0.111111 7:1 8:-1 9:0.111111 10:-0.777778 +2 1:-0.833599 2:-1 3:-1 4:-1 5:-1 6:1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.833439 2:-0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.833254 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.833198 2:-0.111111 3:1 4:0.555556 5:1 6:0.555556 7:1 8:-0.555556 9:0.111111 10:-0.555556 +2 1:-0.833149 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.833103 2:-0.555556 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.833025 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-0.555556 10:-1 +2 1:-0.832868 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.832867 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832743 2:-0.333333 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.832702 2:-0.555556 3:-1 4:-1 
5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832643 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.832632 2:0.777778 3:-0.111111 4:-0.111111 5:-0.333333 6:-0.333333 7:-0.111111 8:-0.333333 9:-0.555556 10:-0.555556 +2 1:-0.832602 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-1 9:-1 10:-1 +2 1:-0.832594 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.83239 2:-0.555556 3:-0.333333 4:-0.111111 5:-0.777778 6:0.111111 7:0.555556 8:-0.333333 9:-1 10:-1 +2 1:-0.832389 2:-1 3:-1 4:-1 5:-1 6:-0.555556 7:-0.777778 8:-0.777778 9:-1 10:-1 +2 1:-0.832114 2:-0.555556 3:-1 4:-1 5:-0.555556 6:0.555556 7:-1 8:-0.111111 9:0.555556 10:-1 +4 1:-0.832062 2:0.555556 3:0.555556 4:0.333333 5:-0.333333 6:1 7:1 8:0.333333 9:0.555556 10:0.333333 +2 1:-0.831962 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.831843 2:0.333333 3:-0.777778 4:-0.333333 5:-1 6:0.111111 7:1 8:-0.111111 9:-0.333333 10:-0.555556 +4 1:-0.83184 2:1 3:1 4:0.555556 5:0.111111 6:-0.333333 7:-0.111111 8:0.555556 9:1 10:-1 +2 1:-0.831675 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-0.555556 8:-1 9:-1 10:-1 +2 1:-0.831661 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.831386 2:-0.111111 3:-0.111111 4:-0.111111 5:0.111111 6:-0.555556 7:1 8:-0.555556 9:-1 10:-1 +2 1:-0.831272 2:-1 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.831254 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.830794 2:0.777778 3:0.777778 4:1 5:-0.555556 6:0.111111 7:1 8:0.333333 9:1 10:0.111111 +4 1:-0.830701 2:1 3:0.333333 4:0.333333 5:-0.333333 6:-0.111111 7:1 8:-0.111111 9:0.333333 10:-0.777778 +2 1:-0.830676 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.830648 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.830623 2:-1 3:-1 4:-1 5:-0.777778 6:-1 7:-0.555556 8:-1 9:-1 10:0.333333 +2 1:-0.830542 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.555556 9:-0.777778 10:-1 +4 1:-0.83054 2:-0.111111 3:0.111111 4:0.333333 5:0.555556 6:0.555556 7:1 8:-0.555556 9:1 10:-0.555556 +4 1:-0.83052 2:1 3:0.555556 4:1 5:1 6:0.111111 7:-1 8:-0.555556 9:-1 10:1 +2 1:-0.830443 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.830357 2:-1 3:-1 4:-1 5:-0.777778 6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.83028 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.830243 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.830125 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.830114 2:0.111111 3:1 4:1 5:1 6:0.555556 7:1 8:1 9:1 10:0.333333 +4 1:-0.830107 2:0.555556 3:0.111111 4:-0.111111 5:-0.333333 6:-0.555556 7:1 8:0.111111 9:-1 10:-1 +4 1:-0.830098 2:-0.111111 3:0.555556 4:0.333333 5:0.333333 6:1 7:1 8:-0.111111 9:0.333333 10:-1 +2 1:-0.829966 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.829951 2:-0.111111 3:1 4:1 5:-0.555556 6:0.555556 7:-1 8:-0.111111 9:1 10:-0.555556 +2 1:-0.829923 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.82982 2:-0.111111 3:-0.555556 4:-0.555556 5:-0.555556 6:0.111111 7:1 8:-0.555556 9:-1 10:-1 +2 1:-0.829778 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.829607 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.829509 2:0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.829473 2:-0.111111 3:0.555556 4:0.555556 5:0.555556 6:-0.111111 7:1 8:0.333333 9:0.555556 10:-1 +4 1:-0.829407 2:0.555556 3:0.333333 
4:0.111111 5:-0.333333 6:-0.333333 7:1 8:-0.111111 9:-1 10:-1 +2 1:-0.829331 2:-0.777778 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.82924 2:-1 3:-0.111111 4:0.555556 5:0.111111 6:-0.111111 7:0.555556 8:0.333333 9:1 10:-1 +4 1:-0.829218 2:1 3:-0.111111 4:0.111111 5:1 6:0.111111 7:1 8:0.333333 9:0.333333 10:1 +4 1:-0.829047 2:-0.111111 3:0.555556 4:-0.333333 5:1 6:-0.111111 7:0.555556 8:0.777778 9:1 10:-1 +2 1:-0.829 2:-1 3:-0.777778 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.828603 2:1 3:1 4:1 5:0.555556 6:0.111111 7:0.555556 8:0.333333 9:1 10:-1 +4 1:-0.828567 2:0.333333 3:-0.111111 4:1 5:1 6:1 7:1 8:-0.333333 9:1 10:-0.555556 +2 1:-0.828413 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.82841 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.828385 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.828385 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.828243 2:0.555556 3:-0.333333 4:-0.333333 5:-0.111111 6:-0.333333 7:0.333333 8:0.333333 9:0.555556 10:-0.777778 +2 1:-0.828241 2:-0.111111 3:-1 4:-1 5:-0.333333 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.828135 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.828066 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.828005 2:0.777778 3:0.333333 4:0.333333 5:-0.111111 6:-0.111111 7:1 8:0.333333 9:0.555556 10:-0.555556 +4 1:-0.827747 2:1 3:0.555556 4:0.555556 5:-0.333333 6:1 7:1 8:0.555556 9:-1 10:-1 +2 1:-0.827709 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.827694 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.827662 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.827536 2:-0.111111 3:1 4:1 5:0.777778 6:0.111111 7:1 8:0.333333 9:1 10:-0.111111 +4 1:-0.827441 2:1 3:1 4:0.777778 5:-0.555556 6:0.333333 7:-0.111111 8:-0.555556 9:-0.111111 10:-1 +2 1:-0.827423 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.827423 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.827342 2:-0.111111 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.827324 2:0.555556 3:1 4:1 5:1 6:-0.111111 7:1 8:0.555556 9:1 10:0.111111 +4 1:-0.827274 2:0.555556 3:1 4:0.555556 5:0.555556 6:-0.333333 7:0.555556 8:0.333333 9:0.333333 10:-1 +2 1:-0.827204 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.826975 2:1 3:1 4:1 5:1 6:0.333333 7:1 8:0.333333 9:1 10:-0.333333 +4 1:-0.826947 2:1 3:1 4:1 5:1 6:-0.555556 7:1 8:1 9:0.111111 10:-1 +4 1:-0.826815 2:0.555556 3:0.333333 4:0.555556 5:0.333333 6:-0.111111 7:-0.111111 8:-0.111111 9:1 10:-0.777778 +2 1:-0.826763 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.826741 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.826687 2:0.111111 3:1 4:0.333333 5:0.333333 6:0.111111 7:-0.333333 8:0.555556 9:1 10:-0.777778 +2 1:-0.826661 2:0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.826607 2:-1 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.826387 2:1 3:0.111111 4:-0.333333 5:-0.555556 6:1 7:1 8:0.777778 9:1 10:-1 +4 1:-0.826355 2:-0.333333 3:-1 4:-1 5:-0.555556 6:-1 7:-0.111111 8:-0.777778 9:-1 10:-1 +4 1:-0.826266 2:0.333333 3:-0.111111 4:0.111111 5:-0.555556 6:-0.555556 7:0.555556 8:0.333333 9:-0.333333 10:-1 +4 1:-0.826176 2:1 3:-0.111111 4:-0.111111 5:0.111111 6:-0.555556 7:1 8:0.333333 9:0.777778 10:-0.777778 +2 1:-0.826171 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 
8:-0.777778 9:-1 10:-1 +4 1:-0.826136 2:1 3:-0.111111 4:0.333333 5:-0.333333 6:-0.333333 7:1 8:0.555556 9:0.777778 10:-1 +4 1:-0.826036 2:0.555556 3:0.777778 4:0.777778 5:-0.111111 6:-0.555556 7:-0.111111 8:0.333333 9:0.333333 10:-1 +2 1:-0.826012 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.825733 2:1 3:1 4:1 5:-0.555556 6:1 7:1 8:0.777778 9:1 10:-1 +4 1:-0.825657 2:0.333333 3:-0.333333 4:0.333333 5:-0.333333 6:-0.555556 7:0.333333 8:0.333333 9:0.111111 10:-1 +4 1:-0.825552 2:0.111111 3:0.555556 4:0.333333 5:-0.111111 6:0.111111 7:0.555556 8:0.555556 9:0.777778 10:-0.777778 +2 1:-0.825505 2:0.555556 3:-0.333333 4:0.111111 5:-0.555556 6:-0.555556 7:-1 8:-0.333333 9:-0.555556 10:-1 +4 1:-0.825427 2:1 3:-0.333333 4:-0.111111 5:-0.111111 6:-0.111111 7:1 8:-0.333333 9:-1 10:-1 +2 1:-0.824857 2:-0.555556 3:-0.555556 4:-0.777778 5:-1 6:-0.555556 7:-1 8:-0.555556 9:0.111111 10:-1 +4 1:-0.824033 2:1 3:0.555556 4:0.555556 5:-0.777778 6:0.555556 7:1 8:-0.333333 9:0.555556 10:1 +4 1:-0.824015 2:0.777778 3:0.555556 4:0.555556 5:-0.111111 6:0.111111 7:-0.777778 8:-0.333333 9:1 10:-0.333333 +4 1:-0.823913 2:0.555556 3:1 4:1 5:0.555556 6:0.111111 7:0.777778 8:-0.555556 9:1 10:1 +4 1:-0.82378 2:1 3:-0.333333 4:-0.555556 5:-0.777778 6:-0.555556 7:1 8:-0.111111 9:-0.555556 10:-0.777778 +2 1:-0.819714 2:-0.111111 3:-1 4:-0.555556 5:-0.555556 6:-0.777778 7:-0.777778 8:-0.777778 9:-0.555556 10:-1 +2 1:-0.818876 2:-0.555556 3:-1 4:-1 5:-0.555556 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.818737 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.990339 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-0.111111 9:-1 10:-1 +2 1:-0.817466 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.817131 2:-0.111111 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-0.777778 8:-0.555556 9:-1 10:-1 +4 1:-0.987826 2:0.555556 3:1 4:1 5:0.555556 6:-0.111111 7:1 8:0.333333 9:0.555556 10:-1 +4 1:-0.987742 2:0.555556 3:-0.333333 4:-0.333333 5:-1 6:-0.777778 7:0.777778 8:-0.555556 9:-0.555556 10:-1 +2 1:-0.984444 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:0.111111 10:-1 +2 1:-0.981997 2:-1 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.980901 2:1 3:-0.333333 4:-0.333333 5:1 6:-0.777778 7:1 8:-0.111111 9:-0.555556 10:-0.555556 +2 1:-0.857569 2:0.111111 3:-0.555556 4:-0.555556 5:-0.111111 6:-0.555556 7:1 8:-0.555556 9:-0.111111 10:-0.555556 +4 1:-0.845097 2:0.111111 3:1 4:1 5:-0.777778 6:0.555556 7:1 8:0.333333 9:-0.555556 10:-0.555556 +4 1:-0.842769 2:0.777778 3:1 4:1 5:-1 6:1 7:0.555556 8:-0.555556 9:-0.555556 10:-1 +4 1:-0.83491 2:-0.111111 3:0.111111 4:0.111111 5:-0.777778 6:-0.333333 7:1 8:-0.555556 9:0.111111 10:-1 +2 1:-0.832868 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832868 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.830443 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.973177 2:-0.111111 3:0.333333 4:0.333333 5:-1 6:-0.111111 7:0.555556 8:-0.555556 9:-0.333333 10:-1 +4 1:-0.971284 2:1 3:-0.111111 4:0.555556 5:1 6:-0.555556 7:1 8:-0.111111 9:-1 10:-0.555556 +4 1:-0.970105 2:-0.111111 3:1 4:1 5:0.111111 6:1 7:1 8:1 9:0.111111 10:-0.111111 +4 1:-0.968522 2:0.555556 3:0.555556 4:0.777778 5:-0.333333 6:-0.111111 7:1 8:0.333333 9:0.555556 10:-1 +4 1:-0.964179 2:1 3:-0.333333 4:-0.333333 5:1 6:0.111111 7:1 8:-0.111111 9:-0.111111 10:-1 +4 1:-0.962504 2:0.333333 3:0.777778 4:-0.333333 5:1 6:1 7:-0.555556 8:-0.111111 9:-0.555556 10:-0.555556 +2 1:-0.832868 2:-0.111111 3:-1 
4:-0.333333 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +4 1:-0.830443 2:1 3:1 4:0.111111 5:-0.555556 6:-0.555556 7:1 8:-0.333333 9:-0.555556 10:-0.777778 +4 1:-0.961571 2:-0.555556 3:-0.555556 4:-0.111111 5:-0.777778 6:-0.555556 7:1 8:0.333333 9:-1 10:-1 +4 1:-0.961011 2:1 3:0.555556 4:0.555556 5:-0.777778 6:-0.555556 7:-0.333333 8:0.555556 9:0.333333 10:0.555556 +2 1:-0.951949 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.951092 2:0.555556 3:-0.333333 4:0.333333 5:-1 6:-0.555556 7:1 8:-0.555556 9:0.777778 10:-0.777778 +2 1:-0.948013 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.961571 2:-0.555556 3:-0.555556 4:-0.111111 5:-0.777778 6:-0.555556 7:1 8:0.333333 9:-1 10:-1 +4 1:-0.945407 2:0.333333 3:-0.777778 4:-0.333333 5:-1 6:-0.555556 7:-0.333333 8:-0.555556 9:-0.555556 10:-1 +2 1:-0.94502 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.944568 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.941918 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.941318 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.938238 2:1 3:-0.111111 4:0.333333 5:-0.555556 6:-0.555556 7:0.333333 8:-0.555556 9:-0.555556 10:0.555556 +2 1:-0.936837 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.93678 2:-0.777778 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.936555 2:-1 3:-0.333333 4:-0.555556 5:1 6:-0.333333 7:1 8:-0.111111 9:0.111111 10:-1 +4 1:-0.935943 2:1 3:-0.333333 4:0.111111 5:-1 6:-0.777778 7:1 8:-0.111111 9:-0.555556 10:-1 +4 1:-0.933558 2:0.333333 3:-0.333333 4:-0.111111 5:1 6:-0.777778 7:1 8:-0.555556 9:0.555556 10:-0.777778 +4 1:-0.930701 2:0.555556 3:1 4:1 5:1 6:0.555556 7:1 8:1 9:0.333333 10:-0.555556 +4 1:-0.930408 2:1 3:1 4:1 5:1 6:1 7:1 8:-0.333333 9:1 10:1 +2 1:-0.929511 2:-0.555556 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.928283 2:0.111111 3:-1 4:-0.555556 5:-1 6:-0.333333 7:-0.111111 8:-0.111111 9:1 10:-1 +4 1:-0.926428 2:-0.111111 3:0.111111 4:0.111111 5:0.555556 6:0.111111 7:1 8:-0.333333 9:1 10:-0.333333 +2 1:-0.925725 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.925606 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.919664 2:1 3:-0.333333 4:-0.333333 5:0.111111 6:-0.777778 7:1 8:-0.777778 9:-0.555556 10:-1 +4 1:-0.918849 2:-0.111111 3:-0.111111 4:0.333333 5:0.555556 6:0.111111 7:1 8:0.333333 9:-0.333333 10:-1 +2 1:-0.917427 2:-0.111111 3:-0.555556 4:-0.333333 5:-0.555556 6:-0.333333 7:-0.111111 8:-0.333333 9:0.333333 10:-1 +2 1:-0.916089 2:0.555556 3:-0.777778 4:-1 5:-1 6:-0.111111 7:-1 8:-1 9:-1 10:-1 +4 1:-1 2:0.777778 3:-1 4:-0.777778 5:0.111111 6:-0.333333 7:1 8:0.333333 9:0.333333 10:-0.777778 +4 1:-0.914499 2:0.555556 3:-0.333333 4:1 5:-0.111111 6:-0.333333 7:-0.333333 8:0.333333 9:1 10:-1 +2 1:-0.914457 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.913767 2:1 3:1 4:1 5:0.333333 6:0.777778 7:1 8:0.333333 9:1 10:1 +2 1:-0.912847 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.911821 2:0.555556 3:-0.555556 4:-0.333333 5:0.777778 6:-0.555556 7:1 8:-0.555556 9:-0.555556 10:-1 +4 1:-0.910945 2:1 3:0.555556 4:-0.333333 5:-0.333333 6:-0.333333 7:1 8:-0.555556 9:1 10:-0.333333 +2 1:-0.909982 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.909855 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.909816 2:0.333333 3:0.555556 4:0.333333 5:0.111111 
6:-0.333333 7:-0.555556 8:0.555556 9:0.555556 10:-0.333333 +2 1:-0.908855 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-0.111111 9:-1 10:-1 +2 1:-0.907164 2:-0.777778 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.906705 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.906168 2:0.555556 3:0.111111 4:-0.333333 5:1 6:1 7:-1 8:-0.555556 9:-0.111111 10:-1 +2 1:-0.905858 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.904305 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.903958 2:-0.111111 3:-0.111111 4:-0.111111 5:-0.777778 6:-0.111111 7:1 8:-0.333333 9:-0.555556 10:-1 +4 1:-0.90353 2:0.111111 3:0.555556 4:0.333333 5:0.555556 6:0.111111 7:0.555556 8:0.555556 9:0.777778 10:-1 +2 1:-0.902133 2:-1 3:-1 4:-1 5:-1 6:-0.111111 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.901709 2:-0.333333 3:-0.333333 4:-0.333333 5:-0.333333 6:0.111111 7:-0.111111 8:0.333333 9:-0.555556 10:-1 +4 1:-0.900305 2:0.333333 3:0.111111 4:-0.555556 5:-0.777778 6:-0.111111 7:1 8:0.333333 9:-0.333333 10:0.111111 +2 1:-0.899893 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.899866 2:-0.111111 3:-0.333333 4:0.111111 5:1 6:-0.777778 7:1 8:-0.333333 9:-1 10:-1 +2 1:-0.89887 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.898443 2:-0.555556 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.555556 10:-1 +4 1:-0.897016 2:1 3:-1 4:-1 5:-1 6:-0.777778 7:1 8:-0.111111 9:-0.333333 10:-1 +2 1:-0.896533 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.895956 2:0.555556 3:1 4:-0.555556 5:-0.777778 6:0.111111 7:-0.333333 8:-0.555556 9:1 10:-1 +4 1:-0.89592 2:1 3:-0.333333 4:0.111111 5:-0.333333 6:-0.111111 7:1 8:0.333333 9:-1 10:-1 +4 1:-0.998056 2:1 3:-0.333333 4:0.333333 5:-0.777778 6:-0.777778 7:0.555556 8:0.111111 9:-1 10:-1 +2 1:-0.895213 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-0.777778 +2 1:-0.894453 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +4 1:-0.892191 2:-0.111111 3:-0.333333 4:0.111111 5:0.111111 6:-0.333333 7:1 8:-0.333333 9:-0.555556 10:-1 +4 1:-0.89213 2:0.555556 3:0.111111 4:0.333333 5:-0.555556 6:-0.555556 7:1 8:-0.555556 9:-0.333333 10:-0.777778 +2 1:-0.891066 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.890381 2:0.111111 3:-0.111111 4:-0.111111 5:0.555556 6:-0.333333 7:1 8:-0.555556 9:-0.333333 10:-1 +2 1:-0.890217 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.904305 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.889023 2:0.555556 3:-0.111111 4:-0.111111 5:-0.111111 6:-0.777778 7:1 8:-0.333333 9:-0.555556 10:-1 +4 1:-0.888501 2:1 3:-0.555556 4:-0.555556 5:-1 6:-0.777778 7:1 8:0.333333 9:0.111111 10:-1 +2 1:-0.888473 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.887851 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.887755 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.886572 2:0.333333 3:0.111111 4:-0.333333 5:0.555556 6:1 7:1 8:0.777778 9:-0.111111 10:-0.555556 +2 1:-0.885961 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.885398 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.555556 7:-1 8:-1 9:-0.555556 10:-1 +2 1:-0.885312 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-0.555556 10:-1 +4 1:-0.885169 2:-0.555556 3:-0.333333 4:-0.333333 5:1 6:-0.111111 7:-1 8:-0.555556 9:-0.555556 10:-1 +4 1:-0.885118 2:-0.333333 3:-0.777778 4:-0.555556 5:-0.111111 6:-0.555556 7:0.555556 8:0.333333 9:0.111111 10:-1 
+2 1:-0.88454 2:-0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.884444 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.882987 2:-0.555556 3:-0.333333 4:-0.111111 5:-0.555556 6:0.333333 7:-0.555556 8:-0.333333 9:0.111111 10:-1 +4 1:-0.88239 2:-0.777778 3:0.333333 4:1 5:1 6:0.333333 7:1 8:-0.333333 9:0.777778 10:-0.333333 +2 1:-0.881689 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.881353 2:-0.333333 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-0.777778 10:-1 +4 1:-0.881145 2:-0.111111 3:-0.555556 4:-0.555556 5:-1 6:-0.555556 7:-0.555556 8:-0.555556 9:-0.555556 10:-0.555556 +4 1:-0.881118 2:0.555556 3:1 4:1 5:0.333333 6:1 7:1 8:0.333333 9:-0.555556 10:0.555556 +4 1:-0.880076 2:0.555556 3:1 4:-0.111111 5:-0.555556 6:0.555556 7:-0.333333 8:-0.333333 9:1 10:-0.555556 +4 1:-0.878997 2:1 3:-0.555556 4:-0.111111 5:-0.333333 6:-0.555556 7:0.333333 8:-0.555556 9:-0.111111 10:-0.555556 +4 1:-0.878438 2:0.111111 3:1 4:1 5:1 6:1 7:1 8:0.555556 9:1 10:1 +4 1:-0.878341 2:-0.555556 3:1 4:-0.555556 5:1 6:0.111111 7:1 8:-0.111111 9:-1 10:-0.333333 +2 1:-0.876813 2:-0.555556 3:-0.777778 4:-0.777778 5:-1 6:-0.333333 7:-0.555556 8:-0.777778 9:-1 10:-1 +2 1:-0.876761 2:-0.333333 3:-0.333333 4:-0.333333 5:-0.777778 6:-0.777778 7:-0.555556 8:-0.777778 9:-1 10:-1 +2 1:-0.875584 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.875469 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.995169 2:0.111111 3:1 4:1 5:1 6:0.555556 7:1 8:0.333333 9:1 10:0.333333 +4 1:-0.985524 2:-0.111111 3:0.555556 4:0.555556 5:1 6:-0.111111 7:1 8:0.555556 9:1 10:-0.555556 +2 1:-0.95835 2:-1 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.945452 2:-1 3:-1 4:-0.555556 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.935899 2:-0.333333 3:-0.555556 4:-0.777778 5:-1 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.935766 2:-1 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.935766 2:-0.333333 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.931586 2:-0.111111 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.925725 2:-0.555556 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.914411 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.913772 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.911751 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.911227 2:-0.555556 3:-1 4:-1 5:-0.333333 6:-0.555556 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.907012 2:-0.111111 3:-0.555556 4:-0.333333 5:-1 6:-0.333333 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.889168 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.883048 2:1 3:0.111111 4:-0.555556 5:0.111111 6:-0.333333 7:1 8:0.333333 9:0.555556 10:-0.333333 +2 1:-0.859737 2:-0.555556 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.856787 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.85621 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.849494 2:-0.555556 3:-0.555556 4:-0.777778 5:-0.777778 6:-0.555556 7:-1 8:-1 9:-0.777778 10:-0.555556 +4 1:-0.848128 2:0.333333 3:0.111111 4:0.111111 5:-0.555556 6:-0.777778 7:1 8:0.333333 9:-1 10:-1 +2 1:-0.843 2:-0.111111 3:-0.555556 4:-0.555556 5:-0.777778 6:-0.555556 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.843 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.842679 2:-0.111111 3:-1 4:-1 5:-1 6:-0.555556 
7:-0.777778 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.840484 2:-1 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.837449 2:1 3:0.555556 4:0.333333 5:-0.333333 6:-0.555556 7:1 8:0.333333 9:0.777778 10:-1 +2 1:-0.836809 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.836476 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.836453 2:-1 3:-0.777778 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.834978 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.833797 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.830798 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.830767 2:-0.555556 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.829297 2:-1 3:-0.777778 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.828558 2:-0.555556 3:1 4:0.555556 5:0.333333 6:0.111111 7:0.777778 8:0.777778 9:-0.555556 10:0.555556 +2 1:-0.828181 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.826805 2:-0.111111 3:-0.555556 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.826759 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.333333 8:-1 9:-1 10:-1 +2 1:-0.826724 2:-1 3:-0.777778 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-0.777778 10:-1 +2 1:-0.82577 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.825483 2:-0.333333 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.825079 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.824738 2:-0.777778 3:-0.555556 4:-0.777778 5:-0.777778 6:-0.777778 7:-0.777778 8:-0.555556 9:-1 10:-1 +2 1:-0.824613 2:-0.555556 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.824562 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.82447 2:1 3:1 4:1 5:0.111111 6:0.555556 7:-0.333333 8:0.555556 9:-0.111111 10:-1 +2 1:-0.824428 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.824423 2:0.555556 3:-0.111111 4:0.111111 5:-0.777778 6:-0.555556 7:1 8:0.111111 9:0.111111 10:-1 +2 1:-0.824381 2:-0.555556 3:-0.555556 4:-0.777778 5:0.111111 6:-0.555556 7:-0.555556 8:-0.555556 9:-0.111111 10:-1 +4 1:-0.824364 2:0.555556 3:0.333333 4:0.555556 5:-0.111111 6:1 7:1 8:0.333333 9:-0.777778 10:-1 +2 1:-0.824271 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.824216 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.777778 7:-0.777778 8:-0.555556 9:-0.777778 10:-0.777778 +2 1:-0.822249 2:-0.777778 3:-0.555556 4:-1 5:-1 6:-0.111111 7:-1 8:-1 9:-1 10:-1 +2 1:-0.821968 2:-0.555556 3:-0.777778 4:-0.777778 5:-0.555556 6:-0.777778 7:-0.555556 8:-0.555556 9:-1 10:-1 +4 1:-0.821697 2:1 3:1 4:1 5:0.333333 6:1 7:1 8:0.555556 9:-0.777778 10:-1 +2 1:-0.82163 2:-0.333333 3:-0.555556 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.555556 10:-1 +2 1:-0.821605 2:-0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.821587 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.821496 2:0.777778 3:1 4:1 5:1 6:1 7:1 8:1 9:1 10:-1 +2 1:-0.821495 2:-0.111111 3:-0.555556 4:0.111111 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.82036 2:0.555556 3:0.333333 4:0.555556 5:-0.777778 6:-0.333333 7:-0.777778 8:-0.111111 9:1 10:-1 +2 1:-0.819481 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.818876 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.818876 2:-1 3:-0.555556 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 
9:-0.777778 10:-1 +2 1:-0.818876 2:-0.111111 3:-1 4:-1 5:-0.555556 6:-0.333333 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.818646 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.816285 2:-0.555556 3:-0.777778 4:-0.777778 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.816285 2:0.111111 3:0.777778 4:0.333333 5:-0.111111 6:-0.111111 7:0.555556 8:-0.333333 9:-0.777778 10:-1 +4 1:-0.816117 2:1 3:0.555556 4:1 5:-1 6:-0.555556 7:1 8:-0.111111 9:-1 10:-1 +4 1:-0.816024 2:1 3:1 4:1 5:-1 6:0.111111 7:-1 8:-0.777778 9:0.555556 10:-1 +2 1:-0.930705 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.926045 2:-0.333333 3:-1 4:-0.555556 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.924855 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.918634 2:1 3:-0.333333 4:-0.555556 5:1 6:-0.333333 7:1 8:1 9:-1 10:-1 +2 1:-0.908217 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.333333 6:-0.777778 7:-0.333333 8:-1 9:-1 10:-1 +2 1:-0.899823 2:-1 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-0.555556 8:-1 9:-1 10:-1 +2 1:-0.899823 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-1 9:-1 10:-1 +2 1:-0.892886 2:-0.111111 3:-1 4:-1 5:0.111111 6:-0.555556 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.885856 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.852668 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.851322 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832868 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +4 1:-0.831204 2:-0.111111 3:0.333333 4:0.777778 5:0.555556 6:0.111111 7:1 8:0.555556 9:1 10:-1 +2 1:-0.829961 2:-0.333333 3:-1 4:-1 5:-0.555556 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.829904 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.826196 2:-0.555556 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.825611 2:-0.333333 3:-0.111111 4:-0.111111 5:0.555556 6:0.111111 7:1 8:1 9:0.333333 10:-1 +2 1:-0.824503 2:-0.777778 3:-0.555556 4:-1 5:-1 6:-0.555556 7:-1 8:-1 9:-1 10:-1 +4 1:-0.823286 2:1 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:0.111111 8:-1 9:-1 10:-0.777778 +4 1:-0.821657 2:1 3:0.111111 4:-0.111111 5:0.555556 6:-0.111111 7:1 8:0.555556 9:0.111111 10:-1 +4 1:-0.821427 2:0.555556 3:0.555556 4:0.777778 5:0.111111 6:0.111111 7:-0.555556 8:1 9:1 10:-1 +2 1:-0.820364 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.820099 2:-0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.820037 2:-0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.819964 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-1 9:-1 10:-1 +2 1:-0.818876 2:0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.818253 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-0.777778 10:-1 +2 1:-0.816206 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.815817 2:1 3:0.777778 4:0.555556 5:0.333333 6:0.111111 7:-0.333333 8:0.333333 9:1 10:-0.555556 +4 1:-0.815541 2:1 3:0.111111 4:0.111111 5:-0.777778 6:-0.333333 7:1 8:0.777778 9:0.333333 10:-1 +4 1:-0.815365 2:0.111111 3:0.111111 4:0.111111 5:-0.111111 6:-0.333333 7:1 8:0.333333 9:0.111111 10:-0.777778 +2 1:-0.844351 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832976 2:-1 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.828509 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.824449 2:0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.821636 2:0.111111 3:-1 4:-1 5:-1 6:-1 7:-1 
8:-1 9:-1 10:-1 +2 1:-0.819849 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.818718 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.817204 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815898 2:-0.333333 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815863 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815814 2:-0.111111 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.81543 2:-0.333333 3:0.555556 4:0.333333 5:1 6:-0.333333 7:1 8:0.333333 9:-0.111111 10:-1 +2 1:-0.815014 2:-0.111111 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.814942 2:-0.111111 3:-0.555556 4:-0.777778 5:-0.333333 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.812591 2:0.777778 3:1 4:1 5:1 6:1 7:-0.111111 8:1 9:1 10:1 +4 1:-0.938647 2:0.555556 3:0.333333 4:0.555556 5:-0.111111 6:-0.111111 7:1 8:0.777778 9:1 10:-1 +2 1:-0.891856 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.859809 2:-1 3:-1 4:-1 5:-0.555556 6:-1 7:-0.555556 8:-1 9:-1 10:-1 +2 1:-0.849578 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.849065 2:1 3:1 4:1 5:1 6:0.111111 7:1 8:0.555556 9:-1 10:-0.111111 +4 1:-0.848708 2:-0.555556 3:0.111111 4:-0.333333 5:1 6:-0.555556 7:-0.555556 8:-0.555556 9:-0.333333 10:-1 +4 1:-0.847545 2:0.111111 3:-0.555556 4:-0.777778 5:-1 6:-0.555556 7:-0.333333 8:-0.333333 9:-1 10:-1 +2 1:-0.842892 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.84231 2:-0.111111 3:0.555556 4:0.777778 5:-0.333333 6:-0.555556 7:1 8:0.333333 9:-1 10:-1 +2 1:-0.840099 2:-0.333333 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.838797 2:-0.111111 3:1 4:1 5:1 6:0.111111 7:1 8:0.111111 9:-0.111111 10:-0.777778 +2 1:-0.836817 2:-0.111111 3:-1 4:-0.777778 5:1 6:-0.333333 7:-0.111111 8:-0.777778 9:-1 10:-1 +2 1:-0.83458 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.832993 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832868 2:-0.333333 3:-0.777778 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.829559 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.827559 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.826571 2:0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.824537 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.822187 2:-0.333333 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.820398 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.81997 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.8187 2:-0.555556 3:-0.555556 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.817255 2:0.555556 3:1 4:1 5:1 6:0.333333 7:-0.111111 8:-0.333333 9:0.555556 10:0.333333 +2 1:-0.815976 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-0.333333 8:-1 9:-1 10:-1 +2 1:-0.815704 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815675 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.81555 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815316 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.815306 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.814618 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.814366 2:0.111111 3:0.111111 4:0.333333 5:1 6:-0.555556 7:1 8:0.555556 9:1 10:-0.777778 +4 1:-0.813315 2:-0.333333 3:1 4:-0.333333 5:0.333333 6:-0.555556 7:1 8:0.777778 9:1 10:-1 +2 1:-0.812306 2:-1 3:-1 4:-1 5:-1 
6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.812306 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.812272 2:-0.555556 3:-1 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.810759 2:-0.333333 3:0.333333 4:0.555556 5:-0.555556 6:-0.333333 7:1 8:0.777778 9:-1 10:-1 +2 1:-0.959716 2:-1 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-1 9:-1 10:-1 +2 1:-0.954269 2:-0.333333 3:-1 4:-1 5:-1 6:-0.555556 7:-1 8:-1 9:-1 10:-1 +4 1:-0.947887 2:1 3:-0.333333 4:-0.111111 5:-0.333333 6:-0.555556 7:-0.111111 8:0.333333 9:-0.555556 10:-1 +4 1:-0.909082 2:0.333333 3:-0.111111 4:0.111111 5:1 6:-0.333333 7:1 8:-0.111111 9:-0.555556 10:-1 +2 1:-0.897501 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.89452 2:-0.555556 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.89452 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.890217 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.888838 2:0.111111 3:-1 4:-0.555556 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:0.220274 2:-0.333333 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.884384 2:0.333333 3:-0.333333 4:-0.333333 5:-0.555556 6:-0.333333 7:1 8:0.111111 9:0.777778 10:-1 +2 1:-0.879917 2:-0.333333 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.879553 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.853679 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.851722 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.850853 2:-1 3:-1 4:-0.555556 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.843225 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.842757 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.839935 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.838392 2:0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.83649 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-0.777778 8:-0.777778 9:-1 10:-1 +2 1:-0.834485 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.833998 2:-0.111111 3:-0.555556 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.832542 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.832191 2:-0.777778 3:-1 4:-0.555556 5:-0.777778 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.83061 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.826885 2:0.111111 3:1 4:1 5:1 6:-0.333333 7:1 8:0.333333 9:1 10:-1 +2 1:-0.824176 2:-0.777778 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +2 1:-0.824176 2:-0.555556 3:-1 4:-1 5:-1 6:-1 7:-1 8:-1 9:-1 10:-1 +4 1:-0.824111 2:0.333333 3:0.555556 4:-0.555556 5:0.333333 6:-0.333333 7:-0.111111 8:0.333333 9:0.555556 10:-0.777778 +2 1:-0.817101 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.816889 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.815316 2:-0.555556 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-0.333333 9:-0.777778 10:-1 +2 1:-0.814358 2:-0.333333 3:-0.333333 4:-0.777778 5:-1 6:-0.777778 7:-0.111111 8:-0.777778 9:-1 10:-0.777778 +2 1:-0.813265 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.813216 2:-0.333333 3:-0.555556 4:-1 5:-1 6:-0.777778 7:-1 8:-0.333333 9:0.555556 10:-1 +2 1:-0.812129 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.777778 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.81212 2:-0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.812116 2:-0.777778 
3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.812029 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.812028 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.812028 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.811073 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.81101 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.810614 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +4 1:-0.810613 2:-0.111111 3:0.333333 4:1 5:1 6:-0.111111 7:1 8:1 9:1 10:-1 +2 1:-0.810361 2:-0.555556 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.810216 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-0.555556 8:-0.777778 9:-1 10:-1 +4 1:-0.808872 2:0.555556 3:-0.333333 4:-0.333333 5:-1 6:0.111111 7:1 8:-0.777778 9:-0.111111 10:-0.777778 +4 1:-0.808827 2:1 3:1 4:0.555556 5:1 6:0.111111 7:-0.111111 8:1 9:-0.555556 10:-1 +4 1:-0.808715 2:0.555556 3:1 4:-0.333333 5:-0.333333 6:0.555556 7:1 8:0.555556 9:-0.777778 10:-1 +4 1:-0.988118 2:0.333333 3:0.111111 4:1 5:-0.111111 6:-0.555556 7:1 8:0.777778 9:1 10:-0.777778 +2 1:-0.981994 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.961017 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.952968 2:1 3:0.777778 4:0.333333 5:-0.555556 6:-0.333333 7:-0.777778 8:0.333333 9:0.333333 10:-1 +2 1:-0.951949 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.906328 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.90565 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.905631 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.88636 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.879296 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.878279 2:-0.111111 3:0.333333 4:1 5:0.111111 6:-0.111111 7:1 8:0.333333 9:-0.111111 10:-1 +4 1:-0.844028 2:0.111111 3:1 4:-0.111111 5:-0.111111 6:-0.333333 7:1 8:0.111111 9:1 10:-1 +2 1:-0.842821 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.827594 2:-0.111111 3:-1 4:-1 5:0.111111 6:-0.555556 7:-1 8:-1 9:-1 10:-1 +2 1:-0.824353 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.822095 2:0.555556 3:1 4:1 5:1 6:0.111111 7:1 8:1 9:1 10:-1 +2 1:-0.821017 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +4 1:-0.820043 2:0.777778 3:0.555556 4:0.555556 5:0.777778 6:0.111111 7:-0.555556 8:-0.333333 9:-1 10:-1 +2 1:-0.819462 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.816162 2:-0.333333 3:1 4:0.555556 5:-0.111111 6:-0.333333 7:-1 8:1 9:-1 10:-1 +4 1:-0.815973 2:-0.777778 3:-0.111111 4:0.333333 5:0.111111 6:-0.333333 7:1 8:0.333333 9:0.111111 10:-1 +4 1:-0.815531 2:1 3:-0.555556 4:-0.333333 5:-0.111111 6:-0.555556 7:1 8:-0.333333 9:-1 10:-1 +2 1:-0.813531 2:-0.111111 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.812989 2:-0.333333 3:0.555556 4:0.111111 5:-0.555556 6:-0.333333 7:1 8:0.333333 9:-1 10:-1 +2 1:-0.812297 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.811525 2:-0.333333 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.810367 2:-0.111111 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.810302 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.810128 2:-0.111111 3:-0.777778 4:-0.333333 5:-1 6:-1 7:-1 
8:-1 9:-1 10:-1 +2 1:-0.809809 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.808666 2:-1 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.807752 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +4 1:-0.807439 2:-0.111111 3:-0.333333 4:0.111111 5:0.555556 6:-0.333333 7:-1 8:0.555556 9:1 10:-1 +4 1:-0.981337 2:-0.111111 3:-0.555556 4:-0.777778 5:0.555556 6:-0.111111 7:1 8:0.555556 9:-1 10:-0.777778 +4 1:-0.956828 2:1 3:-0.111111 4:1 5:-0.555556 6:-0.111111 7:0.555556 8:0.333333 9:0.555556 10:-0.555556 +2 1:-0.956729 2:-0.333333 3:-1 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.948013 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.926188 2:-0.111111 3:1 4:1 5:1 6:1 7:1 8:1 9:-1 10:-1 +2 1:-0.91442 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.899518 2:1 3:-0.333333 4:-0.555556 5:1 6:-0.555556 7:1 8:0.333333 9:-1 10:-0.777778 +4 1:-0.889455 2:-0.111111 3:1 4:1 5:1 6:-0.111111 7:-0.777778 8:0.555556 9:-0.111111 10:-1 +4 1:-0.886572 2:0.555556 3:1 4:1 5:1 6:0.111111 7:1 8:1 9:1 10:1 +2 1:-0.857627 2:-0.777778 3:-0.555556 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.85539 2:-0.777778 3:-1 4:-1 5:-1 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.853981 2:-0.333333 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.853801 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.850853 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.849083 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.847593 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.84572 2:0.111111 3:-0.555556 4:-0.555556 5:-0.555556 6:-0.555556 7:-0.777778 8:0.111111 9:-1 10:-1 +2 1:-0.839112 2:0.333333 3:-1 4:-0.777778 5:-0.555556 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.837775 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.834122 2:-0.111111 3:-1 4:-1 5:-0.777778 6:-1 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.83269 2:-0.555556 3:-1 4:-0.555556 5:-1 6:-0.555556 7:-0.333333 8:-1 9:-1 10:-1 +4 1:-0.831676 2:-0.333333 3:0.111111 4:0.111111 5:-0.111111 6:0.333333 7:0.111111 8:0.333333 9:0.333333 10:-0.555556 +2 1:-0.831652 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-0.111111 8:-1 9:-1 10:-1 +2 1:-0.828258 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.827405 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.826449 2:0.111111 3:-0.777778 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.824892 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.824449 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.822182 2:0.555556 3:0.333333 4:-0.333333 5:-0.333333 6:-0.111111 7:-0.555556 8:-0.111111 9:1 10:-1 +2 1:-0.821672 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.82118 2:-0.555556 3:-1 4:-0.333333 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.819942 2:1 3:1 4:0.333333 5:0.555556 6:0.333333 7:-1 8:1 9:1 10:-0.555556 +2 1:-0.818918 2:-0.333333 3:-0.777778 4:-0.333333 5:-0.555556 6:-0.777778 7:-0.777778 8:-0.777778 9:-1 10:-1 +2 1:-0.818622 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.818622 2:-0.111111 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.817437 2:-0.333333 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.817006 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.816768 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 
10:-1 +2 1:-0.816139 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.815365 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.814784 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.813657 2:-1 3:-0.777778 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.813646 2:-1 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.812944 2:-0.111111 3:1 4:1 5:1 6:1 7:-0.777778 8:1 9:1 10:1 +2 1:-0.812516 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.812376 2:-0.555556 3:-1 4:-1 5:-0.777778 6:-0.555556 7:-0.333333 8:-1 9:-1 10:-1 +2 1:-0.811798 2:-1 3:-0.777778 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.811635 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.777778 10:-1 +2 1:-0.811619 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.811547 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.811288 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.81077 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.810245 2:-0.111111 3:-0.333333 4:-0.111111 5:-1 6:0.555556 7:-1 8:-0.555556 9:0.111111 10:-1 +4 1:-0.810224 2:0.333333 3:0.555556 4:0.555556 5:0.333333 6:-0.555556 7:1 8:0.333333 9:-0.777778 10:-0.555556 +2 1:-0.810127 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.809363 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.809363 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:1 2:-1 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.808516 2:-1 3:-1 4:-0.555556 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.808495 2:-0.555556 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.808173 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.808144 2:-0.111111 3:-0.777778 4:-0.777778 5:-0.777778 6:-0.777778 7:-1 8:-1 9:-1 10:-0.777778 +2 1:-0.808009 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +4 1:-0.807789 2:-0.111111 3:0.333333 4:-0.333333 5:-1 6:0.111111 7:-1 8:0.333333 9:1 10:-0.555556 +4 1:-0.807774 2:-0.111111 3:1 4:1 5:0.555556 6:-0.111111 7:-0.111111 8:0.333333 9:1 10:-1 +4 1:-0.807412 2:-0.555556 3:1 4:0.333333 5:0.555556 6:-0.111111 7:0.555556 8:0.333333 9:-0.333333 10:-1 +2 1:-0.807375 2:-0.555556 3:-0.777778 4:-1 5:-0.777778 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.807114 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-1 10:-1 +2 1:-0.807114 2:-0.111111 3:-0.555556 4:-0.777778 5:-1 6:-0.555556 7:-1 8:-1 9:-1 10:-1 +2 1:-0.807051 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.805586 2:-0.333333 3:-1 4:-0.333333 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.805548 2:-1 3:-1 4:-0.777778 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-1 +2 1:-0.805109 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.805108 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.805017 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.804877 2:1 3:1 4:1 5:1 6:-0.111111 7:1 8:1 9:1 10:0.333333 +4 1:-0.804697 2:-0.111111 3:1 4:1 5:1 6:-0.333333 7:1 8:-0.111111 9:0.111111 10:-0.555556 +2 1:-0.804563 2:-0.111111 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.555556 9:-0.777778 10:-1 +2 1:-0.939731 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.939731 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.929627 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.929306 
2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.924879 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-0.555556 10:-1 +2 1:-0.919383 2:-0.333333 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.911706 2:-1 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:0.555556 +2 1:-0.911706 2:-1 3:-1 4:-1 5:-0.555556 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.90565 2:-0.111111 3:1 4:1 5:-0.111111 6:-0.333333 7:-0.111111 8:-0.333333 9:-0.333333 10:-1 +2 1:-0.902821 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +2 1:-0.895473 2:-0.555556 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-0.777778 9:-1 10:-0.777778 +2 1:-0.89346 2:-0.555556 3:-1 4:-1 5:-1 6:-0.555556 7:-0.777778 8:-1 9:-1 10:-1 +2 1:-0.883744 2:-0.777778 3:-1 4:-1 5:-1 6:-0.777778 7:-1 8:-1 9:-1 10:-1 +4 1:-0.876716 2:-0.111111 3:1 4:1 5:-0.555556 6:0.333333 7:-0.555556 8:0.555556 9:1 10:-0.777778 +4 1:-0.875424 2:-0.333333 3:0.555556 4:0.111111 5:-0.333333 6:-0.555556 7:-0.333333 8:1 9:0.111111 10:-1 +4 1:-0.875424 2:-0.333333 3:0.555556 4:0.555556 5:-0.111111 6:-0.333333 7:-0.111111 8:1 9:-0.333333 10:-1 diff --git a/inst/dev/datasets/svm/circles.data b/inst/dev/datasets/svm/circles.data new file mode 100644 index 00000000..bdb63c0f --- /dev/null +++ b/inst/dev/datasets/svm/circles.data @@ -0,0 +1,300 @@ +-4.60163011633 -0.351510739159 1 +-4.52852676593 -0.280335929013 1 +-3.98567261457 0.0374258475475 1 +-4.15077681133 0.0973234799017 1 +-3.93663297773 0.905869810506 1 +-3.20894140838 0.399884703914 1 +-4.57188293948 -0.545466862256 1 +-3.11440993452 -0.161590604834 1 +-4.07486909136 0.636587071254 1 +-4.2354087298 -0.87340768101 1 +-3.71059176351 -0.627505454273 1 +-4.63653470657 0.155811518266 1 +-3.40327366904 0.296546050219 1 +-3.71785194572 -0.506750610447 1 +-4.32929370955 -0.631467444941 1 +-3.3258781001 0.150909756807 1 +-3.40639570209 0.0874464484391 1 +-3.33125574079 0.366802236577 1 +-3.55390757791 0.70960656733 1 +-3.30247635994 0.221642415631 1 +-4.8013813007 -0.352517086647 1 +-3.70113460223 -0.426571886587 1 +-3.2909022335 0.352015468479 1 +-4.07441595509 0.857286152593 1 +-4.26227441684 0.850499302502 1 +-3.72704556351 -0.575286694991 1 +-3.69501780066 -0.141877895072 1 +-4.63725184486 -0.447061049685 1 +-4.62615756539 -0.770971661629 1 +-3.56939797821 0.774744367857 1 +-3.62505937853 -0.434368225405 1 +-4.74538980878 0.067124355493 1 +-4.61846151077 0.0281206648351 1 +-4.6435187906 -0.413397703982 1 +-3.35905482946 -0.66784086941 1 +-3.44928266921 -0.233147156007 1 +-3.01578655089 -0.0240705674846 1 +-4.363486931 -0.791612153048 1 +-3.86347397575 -0.454623070669 1 +-4.12335726568 -0.692327691929 1 +-3.05756185499 -0.0930134741395 1 +-4.78703277019 -0.550397001204 1 +-3.82415259403 -0.544245983172 1 +-4.89654409845 0.232047144276 1 +-3.41085687438 0.455208510561 1 +-4.76349754202 -0.321435825235 1 +-4.67801594387 0.516905554787 1 +-3.98563394708 0.109381040616 1 +-4.73913675228 0.479469224646 1 +-4.78427583919 -0.428263840559 1 +-4.90433185829 -0.294251753899 1 +-4.82809289661 0.443015487729 1 +-4.83735386225 0.0959954838765 1 +-3.34133896063 0.0400138447012 1 +-4.00790849828 -0.00820858921797 1 +-4.80643863496 -0.21125139973 1 +-4.01936280702 0.700891457988 1 +-3.86427946536 -0.98070757185 1 +-3.57171834826 -0.536955892624 1 +-3.081856983 0.281528197793 1 +-3.9301313872 -0.96467514567 1 +-4.58321650296 -0.25538088573 1 +-3.24579859483 -0.271276903315 1 +-4.40377270996 -0.50682966774 1 +-3.94639262847 0.554853934029 1 +-4.3554316191 -0.724724894623 1 +-4.26671241042 -0.503395193263 1 
+-4.17885148649 0.726464775708 1 +-4.81208281475 0.309526279499 1 +-3.9194497796 0.586871047472 1 +-4.43288016346 0.848165243694 1 +-4.48918142919 -0.770656213868 1 +-3.90647618384 0.753975569219 1 +-4.02927259541 0.784080302299 1 +-4.57386992062 0.11027975838 1 +-4.83035242795 0.554233142256 1 +-4.37253931308 -0.533177808528 1 +-4.10989763386 -0.365719547979 1 +-3.53630213914 -0.384611611332 1 +-3.13121600034 -0.410427232854 1 +7.03466040519 0.637911244515 -1 +3.91605519751 1.04037450684 -1 +4.89791231078 -1.24413347676 -1 +5.59597322625 -1.00141971408 -1 +5.92113360424 -2.6330059983 -1 +5.5501343095 -0.883505780697 -1 +5.90120872469 -0.782250950535 -1 +7.48463799358 -1.32692848744 -1 +5.89622492294 2.65649036459 -1 +7.19556460994 0.85990665066 -1 +8.29462966766 -0.861543253399 -1 +6.40060400425 2.4509346249 -1 +4.81970882321 -0.989437993913 -1 +7.99474582779 -0.616687230703 -1 +7.54502567341 -2.05095905407 -1 +7.09998127574 -2.72710246531 -1 +4.73440680875 -2.49584420372 -1 +4.9363872534 0.784397702237 -1 +8.26316536469 -0.578639201559 -1 +8.62411892888 -1.29147997418 -1 +8.6096476227 -0.618702403308 -1 +3.30761860029 -1.22170178932 -1 +5.04309549952 -1.71639673024 -1 +7.8918975946 -0.0351273726283 -1 +8.96505263293 -0.269317983125 -1 +7.99158317496 -1.2277811964 -1 +5.07578321504 2.33846477984 -1 +5.67341210342 1.13522240472 -1 +7.91156815079 -1.82780858884 -1 +6.35061643503 1.03047543915 -1 +6.77216288877 -0.982587042086 -1 +5.06892931957 -2.75589994746 -1 +3.60590481366 1.79588281771 -1 +5.48612277035 0.661220520837 -1 +6.20900720172 2.64190981705 -1 +4.41037419414 -0.756308460767 -1 +3.66587998149 0.458278777461 -1 +7.49597920832 -1.43982791264 -1 +5.00275405299 1.75727306828 -1 +4.90077988483 2.47890504415 -1 +7.43307303445 0.432892388104 -1 +7.60476562059 0.490334276064 -1 +6.11357464563 -1.99471404274 -1 +7.56581354991 -0.17483730562 -1 +8.09926546072 -0.433930576292 -1 +7.76538991233 0.347533991401 -1 +6.73093100273 1.14595735606 -1 +6.94129664139 -1.77578995178 -1 +5.03985704149 2.17638232834 -1 +3.64724233886 -1.81220824129 -1 +5.8037678438 -0.433179301263 -1 +5.74856452122 -0.362133243659 -1 +7.58559336567 -1.44699056703 -1 +4.58239450528 1.06630570558 -1 +4.31715762689 -1.45861510785 -1 +8.22374120138 -1.74205710836 -1 +7.63702452792 -1.39849951825 -1 +6.82366104262 -1.46736488335 -1 +4.91150357073 -0.999271966336 -1 +6.10066042974 0.467950289164 -1 +7.90270762186 -1.51259615484 -1 +7.64247968777 -1.63302060272 -1 +3.90361683315 0.647043675254 -1 +5.3119517944 -1.17358339488 -1 +5.7289635463 -2.04139974109 -1 +3.53031320269 -1.16737515045 -1 +8.04269865388 1.09286174407 -1 +3.84065828833 1.92221931759 -1 +6.73863861532 -1.31906588538 -1 +3.92984037928 -1.81598299828 -1 +7.55448736525 -2.30858315364 -1 +3.91367532165 0.2945558938 -1 +3.07069567131 0.525211163697 -1 +5.74539618613 -2.07384790508 -1 +5.39946205782 -0.544047888916 -1 +8.06957115698 -1.43339699148 -1 +5.26183497427 -0.517654054419 -1 +5.88254055013 1.32065325944 -1 +7.32706767213 1.04232294234 -1 +8.31112802049 1.13123833654 -1 +4.66126919915 -1.6346802049 -1 +7.01381273076 2.35794806988 -1 +7.4540126253 -1.18797557797 -1 +4.88428953275 -1.0697229197 -1 +4.58530403593 0.563057568738 -1 +7.90880074198 0.680366074932 -1 +6.76738907556 2.44253949663 -1 +7.51099517025 -1.86263051736 -1 +7.43221351477 -1.09367156284 -1 +5.30543723954 -1.27689504424 -1 +6.39202241174 2.80670624494 -1 +3.24349118196 0.483112816818 -1 +7.2466893052 0.138918256857 -1 +4.08874573246 1.20487891564 -1 +7.51795487673 -0.431123857132 -1 
+8.18186289502 1.00588727044 -1 +3.2738981807 -0.690370682141 -1 +6.76668668719 2.80870085676 -1 +5.78079905169 0.414652619684 -1 +6.89814922376 -2.09054518672 -1 +8.14013835596 -0.977716615055 -1 +7.24446750951 0.961885878832 -1 +7.30972503961 1.42603808395 -1 +5.03506857394 0.429077012993 -1 +7.22174878561 -0.0930248262107 -1 +3.53387060362 1.18505742128 -1 +3.56085314112 0.448650871363 -1 +4.42161156138 -1.18539585901 -1 +8.69247187255 -1.04165395113 -1 +3.65321850291 -1.33552737913 -1 +6.30453172981 -1.07759038652 -1 +3.58928355533 1.56180946331 -1 +7.26226495635 -2.38823307052 -1 +8.59315110481 -0.644828244776 -1 +3.56418158869 -1.19891839314 -1 +5.90178703087 2.01616298421 -1 +7.93407862897 -0.743165231297 -1 +5.81217628766 -1.27984377415 -1 +8.03636428553 1.43793866032 -1 +6.80858898834 -2.32501007586 -1 +6.00251572421 2.8128913452 -1 +4.3344762229 1.88583482214 -1 +4.72249089692 1.57017893936 -1 +7.79426223243 -2.34756955989 -1 +4.53742809922 -2.3697461566 -1 +4.00528409781 -2.10979538943 -1 +3.99456673446 0.716069653413 -1 +5.81772195364 -0.0561475488423 -1 +7.63221870015 0.481924791863 -1 +4.09813822909 1.8330401497 -1 +6.56642268583 -2.93019846931 -1 +7.06831193094 2.35071599243 -1 +4.29992518532 -2.12510169819 -1 +5.77573038382 -0.785192896462 -1 +4.93905051299 -1.12520149993 -1 +7.18089944875 -2.7512756131 -1 +8.20716167761 -1.66304787933 -1 +7.58020921224 1.50325080022 -1 +7.75441247797 0.513770157659 -1 +8.46669771723 0.313182377912 -1 +6.04980370766 -1.50522793738 -1 +4.23931158215 -2.42141783803 -1 +3.27222827011 -0.0161137298465 -1 +7.65556006023 -1.59087682416 -1 +7.46259295413 0.831070132669 -1 +5.32859609161 1.8725039549 -1 +7.25006961302 -0.657187875848 -1 +5.3832383954 -0.883177920774 -1 +6.74608771201 1.48055081224 -1 +7.20761293091 -0.22546637754 -1 +7.86874133892 2.08293315445 -1 +6.78484173375 2.22150256772 -1 +5.59261313328 0.352006491057 -1 +5.72035327348 0.192714702964 -1 +6.21836518439 2.7908628538 -1 +5.01627707549 -0.32720312312 -1 +6.0968001372 1.35614253929 -1 +6.85827161405 0.682644297884 -1 +4.50258055972 2.4614248741 -1 +3.66221464092 -0.425933942269 -1 +3.85145482304 1.12236288376 -1 +5.59464139238 1.52907607811 -1 +7.6124445941 0.493320337329 -1 +5.36025258674 -2.63042584719 -1 +6.18409523615 1.8768987183 -1 +5.34971481617 -2.01851497061 -1 +7.56768670012 -0.677430870819 -1 +5.8849224985 1.46907428524 -1 +7.97329787685 -0.761074736979 -1 +8.11406880849 -0.327880389687 -1 +6.68295262712 -2.43011982745 -1 +6.01386051216 -2.35667484904 -1 +5.79385606878 -1.21538709459 -1 +6.55881353853 0.136967732511 -1 +6.45087522363 1.21387320042 -1 +6.89468220458 -0.739362441811 -1 +6.35640194371 2.28968761635 -1 +7.94639525842 0.0212328188449 -1 +5.71094422095 0.460886816864 -1 +7.88971908843 -1.55930808311 -1 +4.42672698655 -1.04927853221 -1 +8.77873269777 -0.105211372942 -1 +4.81023540822 2.23104143854 -1 +5.49402305114 0.914707741972 -1 +6.54243779887 2.71330481686 -1 +7.67231006221 -0.0669629959113 -1 +7.27185359383 -1.52683541594 -1 +7.43109017945 -2.53105943832 -1 +6.66471773798 2.04112569822 -1 +4.24110245823 1.21845583038 -1 +3.33329729681 0.479310754951 -1 +8.61851027304 -1.4565785356 -1 +4.67668032282 -2.34483686024 -1 +4.07722164473 0.159897925153 -1 +3.06937526174 0.246591914045 -1 +6.0566800084 -1.47392054653 -1 +7.11600132011 1.63612282713 -1 +8.27089826768 0.25588441743 -1 +7.21161942117 -0.268037400728 -1 +5.53040868327 1.66621360985 -1 +4.16022905747 0.538285802482 -1 +6.66097407017 -0.16666115221 -1 +8.31994963914 -0.75023441312 -1 +4.51107967255 
0.151300462607 -1 +4.19052711425 0.0535392445078 -1 +3.92804505317 -1.54536616379 -1 +5.09553439329 -0.398022076319 -1 +7.27488438872 0.795974696275 -1 +6.88264996045 2.21382911071 -1 +8.47340071315 1.26366266452 -1 +5.53606453564 -0.983332109653 -1 +5.56108241838 0.645128003222 -1 +5.82394226064 -0.0403838504709 -1 +7.80404706124 0.0059916749515 -1 +7.45838268208 -2.57365258823 -1 +6.66701285501 0.469645651457 -1 +3.47294196613 -0.239754855755 -1 +5.66460409732 -0.538116237172 -1 +5.11405627959 1.8861678354 -1 +8.52867719999 0.411090501404 -1
diff --git a/inst/dev/datasets/svm/convert_to_RData.R b/inst/dev/datasets/svm/convert_to_RData.R
new file mode 100644
index 00000000..f5e48486
--- /dev/null
+++ b/inst/dev/datasets/svm/convert_to_RData.R
@@ -0,0 +1,48 @@
+# RUN IN THE inst/dev/datasets/svm FOLDER!
+
+# Two circles
+svm.twocircles.dataset <- data.frame(read.table(system.file("dev", "datasets", "svm","circles.data", package="gmum.r")));
+save(svm.twocircles.dataset, file="svm_two_circles_dataset.RData", compress=TRUE);
+
+# 2e
+svm.twoellipsoids.dataset <- data.frame(read.table(system.file("dev", "datasets", "svm","2e.data", package="gmum.r")));
+save(svm.twoellipsoids.dataset, file="svm_two_ellipsoids_dataset.RData", compress=TRUE);
+
+# Breast Cancer
+svm.breastcancer.dataset <- data.frame(read.table(system.file("dev", "datasets", "svm","breast_cancer.data", package="gmum.r"), quote="\""));
+save(svm.breastcancer.dataset, file="svm_breast_cancer_dataset.RData", compress=TRUE);
+
+
+# Transduction
+
+library(SparseM)
+library(e1071)
+system(system.file("dev", "datasets", "svm", "download_transduction.sh",
+                   mustWork=TRUE, package="gmum.r"))
+
+train.transduction <- read.matrix.csr("transductive/train_transduction.dat")
+train.induction <- read.matrix.csr("transductive/train_induction.dat")
+test <- read.matrix.csr("transductive/test.dat")
+
+# Everything past column 9253 is zero, so drop it
+train.transduction$x <- train.transduction$x[,1:9253]
+train.induction$x <- train.induction$x[,1:9253]
+
+# Now standardize labels (this ugly remapping is needed because read.matrix.csr fails here): map train label 1 to 0, and swap test labels 1 and 2 using 3 as a temporary value
+train.induction$y <- as.factor(as.numeric(train.induction$y))
+train.transduction$y <- as.numeric(train.transduction$y)
+train.transduction$y[train.transduction$y == 1] = 0
+train.transduction$y <- as.factor(train.transduction$y)
+test$y <- as.numeric(test$y)
+test$y[test$y == 1] = 3
+test$y[test$y == 2] = 1
+test$y[test$y == 3] = 2
+test$y <- as.factor(test$y)
+
+# Save on space: keep only the test labels
+test$x <- NULL
+
+svm.transduction <- list(tr=train.transduction, ind=train.induction, test=test)
+
+save(svm.transduction, file="svm.transduction.RData")
+
diff --git a/inst/dev/datasets/svm/dexter_train.data b/inst/dev/datasets/svm/dexter_train.data
new file mode 100644
index 00000000..07eff5fc
--- /dev/null
+++ b/inst/dev/datasets/svm/dexter_train.data
@@ -0,0 +1,300 @@
+10:105 39:85 431:122 951:42 981:82 989:132 1001:44 1312:84 1674:160 1951:70 1968:149 2339:196 2820:76 2990:90 3012:52 3490:162 3578:38 3685:98 4181:107 4473:47 4554:69 4707:63 4992:146 5081:69 5379:118 5877:134 6866:459 6955:93 7328:103 7494:210 7638:72 7709:116 7729:71 7947:90 8099:65 8475:22 8986:82 9043:81 9596:57 9987:54 10457:25 10532:269 11142:83 11778:57 11846:65 12209:129 12347:50 12380:96 12439:43 12448:44 12781:178 12847:24 12916:29 13258:87 13372:808 14082:217 14161:28 14729:49 14749:82 14851:55 15429:87 15772:79 15777:109 15793:142 16034:35 16478:91 16551:132 16773:186 16798:97 17782:139 18115:88 18190:111 18295:62 18364:41 18372:69 18800:76 19174:45 19187:72 19386:69 19685:41 19825:131 19921:51 19935:91
19999:56 +257:74 367:129 626:297 1040:147 1052:69 1633:173 1691:100 1914:110 2185:34 3084:49 3266:105 3953:25 4051:322 4723:124 4799:77 5487:122 5577:194 5736:133 5743:212 6792:16 6866:128 7055:197 7129:15 7494:145 7507:126 7709:391 7896:126 8517:111 8759:88 8789:113 8984:36 9415:67 10000:58 10244:465 10532:202 10579:168 11685:192 11878:114 12170:102 12380:52 12916:118 13131:68 13285:388 13428:139 13881:111 14184:77 14239:204 14321:131 14332:74 14562:3 14665:86 14874:44 15108:40 15204:231 15777:131 16757:143 16974:226 17487:185 17621:136 17841:121 18115:53 18364:39 18943:31 19109:74 19247:88 19488:118 19517:68 19610:76 19685:236 +247:28 257:99 865:85 1209:133 2275:47 2339:27 2442:62 2934:74 2990:212 3484:31 3578:20 3633:92 3830:62 3882:29 4614:21 5243:80 5256:89 6199:13 6259:70 6373:91 6374:47 6859:37 6865:57 6866:310 6905:67 6936:113 7494:94 7753:41 8548:20 8789:146 8799:87 8804:108 8925:81 8986:34 9127:63 9792:105 9870:48 10489:102 10561:119 10650:37 11006:29 11340:109 11456:111 11474:394 11530:77 11668:60 11685:115 11869:85 12082:91 12217:887 12610:51 12614:108 12916:4 13929:27 13982:109 14161:11 14851:23 15561:119 15785:53 16034:46 16342:151 16552:39 16676:65 16851:77 17173:22 17272:33 17365:178 17602:94 17675:67 17896:144 18115:112 18160:31 18372:46 18734:40 19078:63 19187:30 19386:14 +626:277 656:146 827:166 1040:110 1353:76 1436:113 1761:95 1914:82 1968:142 2309:154 2509:104 3003:132 3124:48 3153:76 3266:114 3307:54 3490:154 3809:180 4383:430 4723:92 4838:141 4908:111 5074:236 5201:53 5874:116 6372:44 6866:194 7408:65 7494:218 7690:77 7707:12 7848:42 8151:124 8799:111 8804:94 8925:160 8986:237 9045:84 9357:171 9594:81 9800:51 10244:464 10353:112 10394:99 10443:144 10532:44 10636:12 10848:128 10971:96 11069:55 11616:158 11902:170 12170:77 12238:135 12604:108 12610:235 12781:112 12916:95 13165:99 13212:146 13562:72 13786:106 13812:186 13848:67 13881:40 13884:55 13929:47 14082:53 14239:76 14258:52 14458:56 14665:64 15204:142 16026:29 16382:111 16810:99 16852:129 16997:97 17003:120 17015:98 17374:117 17487:138 17621:102 17754:66 17939:148 18295:118 18364:117 18737:79 19255:45 19298:316 19685:176 19707:77 19787:56 +101:21 247:50 556:181 656:88 657:31 891:153 1040:70 1273:159 1406:154 1743:32 1788:173 1968:55 2324:42 2471:38 2602:41 2634:41 2718:52 2726:157 2751:73 2894:62 2934:37 3002:66 3174:29 3213:100 3578:24 3734:64 3755:80 3860:54 3976:154 4392:724 4450:109 4835:95 4955:93 5049:120 5057:21 5081:25 5096:102 5128:42 5698:43 5946:116 6060:51 6229:73 6274:53 6449:28 6484:105 6549:31 6575:25 6599:77 6859:66 6865:25 6866:404 6994:120 7053:38 7229:66 7494:209 7709:130 7729:26 7850:179 8099:24 8152:43 8154:39 8298:62 8333:55 8475:66 8488:36 8518:100 8548:54 8591:185 8794:115 8862:66 8913:28 8945:29 9170:77 9221:30 9379:103 9534:46 9824:32 9859:47 9923:29 9978:77 9987:59 10043:113 10161:75 10457:9 10532:272 10609:66 10753:89 10799:117 10848:102 10872:66 11142:31 11303:10 11487:107 11631:32 11668:53 11781:54 11822:41 11968:116 12170:20 12200:132 12319:21 12582:57 12610:60 12771:135 12916:23 13288:79 13407:65 13685:51 13779:203 13929:24 14161:10 14195:32 14446:99 14501:28 14805:51 14844:29 14883:45 14969:48 14973:24 15303:26 15492:22 15640:55 15780:16 15812:103 16060:50 16162:40 16287:60 16430:69 16487:41 16581:31 16584:63 16817:40 16974:22 17017:97 17369:36 17621:26 17841:72 17894:44 17949:33 18010:32 18050:22 18084:91 18356:134 18364:53 18768:30 18932:149 19139:76 19172:30 19187:52 19255:38 19386:13 19618:148 19752:18 19809:26 19921:37 19942:101 +865:120 1040:40 1244:259 1368:71 1565:448 
1968:51 2062:66 3515:299 3578:29 3976:93 4427:69 4511:111 4640:149 4764:31 5043:198 5071:60 5783:98 6007:424 6197:144 6234:112 6866:672 7494:141 7709:198 7776:323 8099:135 8374:204 8416:103 8789:102 8804:166 9056:73 9109:50 9158:113 9234:113 9530:53 10229:100 10241:139 10457:76 10532:102 10925:164 10999:137 11400:22 11588:85 11642:65 11869:36 11871:319 12209:133 12235:91 12380:84 12781:73 12798:31 12916:56 12968:71 13143:98 13402:51 13498:148 13745:117 13806:86 14352:40 14608:108 14724:157 14749:85 15292:185 16328:71 16344:120 16373:39 17104:61 18295:54 18323:84 18397:5 18493:76 18711:221 19120:9 19788:109 19921:52 +130:88 247:102 1057:264 1268:79 1633:68 1912:125 2055:130 2720:94 2751:157 2894:42 2990:136 3051:75 3237:67 3578:14 4113:114 4308:185 4472:72 4473:110 4568:195 5128:171 5155:85 5435:77 6060:160 6234:74 6372:177 6374:43 6449:58 6866:134 7004:43 7334:70 7709:189 7908:86 8548:37 8821:19 8945:60 9490:129 9683:58 9886:62 9987:40 10187:99 10206:60 10532:81 10848:30 10998:62 11106:22 11400:97 11518:106 11685:108 11695:22 11877:115 12448:33 12610:123 12916:30 13006:44 13072:176 13355:85 13483:75 13664:103 13929:24 14161:62 14501:58 14689:34 15303:53 15774:39 15780:33 15793:53 15927:125 16084:82 16545:7 16709:90 16995:55 17237:896 17824:61 18115:123 18339:104 18364:15 18623:27 19109:72 19278:37 19386:26 19455:63 19787:123 +101:30 268:27 657:45 965:103 1040:10 1125:84 1244:67 1522:38 1951:37 1987:72 2015:66 2071:778 2091:108 2240:52 2311:43 2437:26 2509:67 2530:54 2934:101 3033:66 3266:120 3456:89 3578:15 4309:20 4346:32 4405:62 4831:88 5041:36 5057:30 5081:73 5090:70 5361:35 5431:53 5803:51 5903:52 5934:149 5945:40 6234:150 6530:117 6866:241 6917:41 6935:159 6994:89 7068:113 7086:78 7287:201 7363:181 7680:70 7709:395 8078:158 8152:31 8298:90 8475:153 8556:68 8786:45 8789:105 8908:103 9089:22 9166:40 9707:60 9734:111 9862:91 9966:52 9989:41 10241:62 10377:34 10457:13 10470:52 10530:46 10532:4 10618:85 11074:55 11106:71 11401:54 11781:78 11971:50 12161:45 12319:31 12380:70 12448:47 12674:45 12826:37 12916:12 13288:77 13374:70 13744:101 13829:78 13863:46 13931:50 14001:104 14144:60 14161:44 14221:60 14325:119 14556:31 14558:156 14697:35 14749:44 14820:108 14851:29 14987:52 15353:86 15523:121 15597:115 15777:56 15841:85 16005:48 16310:100 16317:173 16367:53 16391:98 16398:59 16881:41 17015:89 17017:47 17104:62 17161:143 17183:103 17457:90 17471:42 17564:69 17578:84 17717:49 18372:94 18460:131 18964:30 19117:47 19174:48 19473:178 19921:27 +606:126 944:98 1040:13 1125:71 1268:58 2210:690 2311:57 2560:16 2691:144 2751:63 3241:135 3242:270 3578:13 3970:123 4173:271 4308:58 4554:49 4737:30 5081:49 5574:104 5724:183 5946:109 6297:36 6339:130 6726:33 6865:98 6866:258 6873:121 7086:107 7183:32 7372:107 7599:45 7709:252 7797:82 7824:144 7928:64 8099:46 8325:105 8492:76 8703:50 8786:59 8936:60 8956:92 9446:37 9497:146 10216:28 10241:139 10443:69 10532:74 11070:59 11822:39 11869:110 12136:120 12426:74 12610:115 12811:47 12909:120 12916:21 12968:73 13964:49 13992:56 14161:39 14836:179 15368:92 15378:44 15586:58 15780:31 16144:85 16218:104 16441:78 16728:52 16810:202 17173:37 17279:47 17718:12 18373:404 18766:66 19095:54 19255:98 19278:69 19327:38 19434:78 19537:130 19546:57 19823:249 19843:107 19910:71 +80:28 239:98 247:69 569:118 1040:13 1338:107 1393:284 1533:89 1554:43 1572:61 2015:21 2125:80 2194:27 2213:61 2339:22 2560:113 2634:76 2764:87 2789:29 3308:39 3361:63 3578:39 3713:31 3817:91 3882:24 4096:51 4273:36 4500:523 4576:25 4664:46 5041:23 5132:46 5305:46 5385:81 5679:96 5717:50 5783:89 5900:37 
5956:49 5985:71 6168:114 6374:39 6755:122 6792:58 6865:71 6866:494 6927:102 7129:129 7483:122 7494:100 7514:34 7536:39 7709:250 7865:169 8179:128 8370:56 8374:34 8490:88 8548:50 8627:28 8666:65 8720:11 8817:27 8913:52 8986:28 9157:56 9387:119 9424:25 9565:176 10309:48 10441:98 10457:17 10532:62 10677:81 10724:77 10779:31 10925:27 10939:58 11261:96 11446:44 11668:25 11685:43 11723:343 11822:19 11871:52 11964:33 12133:108 12317:64 12395:11 12419:144 12448:75 12535:43 12610:56 12916:5 12968:132 13381:38 13891:32 13929:33 13964:24 13967:75 14161:28 14280:27 14407:390 14461:89 14811:101 15492:60 15561:138 15757:52 15780:15 15853:61 15862:60 15999:343 16007:86 16034:200 16159:36 16315:70 16521:131 16705:35 16709:41 16997:46 17017:60 17376:24 17615:99 17949:30 18364:14 18474:36 18794:165 18894:87 18931:71 18943:66 19095:26 19135:52 19174:15 19210:19 19327:54 19386:210 19473:23 19707:81 19774:23 19889:28 19910:34 19925:45 19926:93 +23:70 685:122 1052:54 1704:56 2456:121 2528:76 2934:102 3293:65 3471:143 3578:14 3876:80 4155:557 4182:29 4376:93 4699:123 5363:100 5435:98 5509:23 5603:127 5946:191 6060:54 6318:70 6372:47 6749:88 6815:55 7210:146 7494:139 7709:178 7729:105 8099:97 8789:103 8804:143 9109:74 9788:128 9870:172 10779:134 10799:37 10954:58 11159:71 11668:216 11859:119 12238:84 12355:78 12427:157 12599:110 12610:61 12916:4 13929:48 13964:103 14495:83 14627:90 14947:19 15043:291 15277:653 15294:91 15798:107 16455:128 16529:114 16765:134 18364:30 18600:113 18825:122 18954:100 19659:73 +251:75 548:109 865:96 981:90 985:90 1040:21 1889:158 1987:75 2493:652 2528:29 2630:92 2720:132 2840:449 2990:23 3578:10 3661:26 3779:106 3988:92 4510:92 4585:185 4790:73 4957:164 5041:74 5783:73 5831:65 6190:138 6274:79 6543:79 6829:124 6866:517 7307:116 7709:136 8081:31 8261:380 9049:65 9325:133 9457:30 10161:124 10200:96 10532:315 10799:156 10848:40 10925:87 11342:65 11425:102 12157:90 12238:150 12363:115 12916:3 12935:64 13224:55 13760:131 13829:64 13929:36 14851:61 15035:108 15429:95 15522:185 15798:79 16373:10 16382:97 16711:97 16810:110 16851:47 17102:106 18035:45 18364:23 18704:172 19004:80 19024:46 19165:134 19255:140 +122:144 151:132 626:115 1040:143 1244:94 1832:73 1914:85 1968:74 2428:120 2522:140 2747:60 2794:70 3012:110 3084:98 3219:157 3308:86 3619:61 3643:34 3754:159 3976:57 4085:66 4723:192 5159:75 5577:151 5783:62 6243:111 6866:454 7483:134 7494:19 7709:162 7959:9 8099:97 9023:115 9233:23 9325:104 9596:170 9715:148 9766:66 10161:96 10241:142 10244:241 10276:72 10443:105 10457:219 10483:59 10650:135 11460:182 11685:67 11822:83 12170:239 12595:159 12916:106 12982:60 12992:132 13384:66 13812:193 13829:104 13881:165 13929:48 13998:28 14662:279 14665:67 14749:123 15116:89 15339:274 15948:122 16034:50 16382:126 16432:110 16798:34 16952:166 16974:88 16997:100 17069:89 17487:216 17621:211 18115:75 18364:122 18372:117 18493:219 19109:104 19120:28 19293:162 19386:51 19685:122 +218:752 488:63 965:44 1040:17 1057:50 1070:33 1241:38 1522:65 2528:152 2529:57 2538:114 2894:25 3043:99 3086:25 3150:42 3187:31 3331:62 3437:74 3578:13 3988:38 4041:298 4468:59 4489:86 4494:116 4569:44 4576:65 4925:45 4957:115 5016:73 5361:30 5612:40 5945:101 6060:47 6153:28 6274:32 6779:124 6866:274 7009:89 7214:59 7494:141 7547:80 7709:263 8078:96 8079:46 8393:70 8548:22 8789:58 8851:76 8913:35 9373:298 9671:88 9987:24 9989:70 10161:177 10443:56 10532:119 10799:82 11022:53 11303:140 11409:37 11476:95 11968:95 11993:116 12120:56 12161:38 12188:87 12238:37 12268:50 12511:113 12610:37 12674:58 12798:104 12916:7 13006:53 13154:74 
13384:38 13547:57 13551:71 13711:40 13829:285 13964:31 14161:12 14167:33 14790:108 14851:25 15521:298 15726:45 15791:77 16112:45 16638:47 16810:139 17104:80 17272:36 17904:43 17961:123 18364:28 18650:59 19386:16 +80:114 492:50 732:74 739:66 1052:100 1530:62 1549:103 1987:95 2242:97 3150:128 3266:157 3308:80 3460:50 3578:26 3614:9 3643:48 3988:116 4319:54 4521:135 4576:201 5783:19 6234:44 6306:54 6313:59 6464:131 6571:182 6866:608 6984:101 7008:96 7494:205 7709:325 7739:241 8631:134 8710:97 8759:104 8789:64 8804:61 8899:95 8943:98 9376:125 9671:150 9711:114 9806:51 10377:88 10430:152 10443:163 10779:250 10883:103 11106:84 11224:291 11588:343 11608:75 11871:124 11968:146 12085:138 12238:135 12351:141 12448:123 12509:105 12916:17 12968:80 13857:187 14161:38 14168:33 14196:132 14525:70 14556:160 14724:94 14890:101 14987:137 15777:244 15844:77 16225:161 16478:98 16496:137 16866:73 17069:93 17249:90 17546:495 17961:90 18668:110 19109:93 19210:79 19271:43 19282:80 19327:74 19921:70 19926:45 19935:127 +101:33 120:764 344:66 569:96 981:47 1312:126 1691:91 1968:29 2276:117 2418:443 2634:64 2990:55 3578:22 3643:162 3988:48 4308:95 4328:47 4455:122 4472:55 4579:55 5128:230 5220:139 5305:39 5760:91 5877:39 6223:34 6344:9 6374:33 6582:131 6866:333 6906:26 7363:33 7393:68 7709:204 7729:41 8081:12 8475:6 8637:85 8759:94 8945:46 9565:115 10350:65 10905:49 10936:120 10998:48 11216:85 11303:107 11355:100 11458:189 11571:155 11588:48 11685:99 11871:99 11877:44 12448:26 12916:10 12918:148 12968:69 13006:68 13529:191 13929:19 14161:48 14483:2 14697:38 14735:96 14968:60 15206:95 15294:143 15303:41 15492:34 15700:82 15774:31 16464:45 16581:49 16810:300 16838:67 16995:42 17279:39 17745:92 17863:144 18672:120 18704:91 18744:82 18894:50 19278:28 19327:62 19386:60 19388:97 19455:48 19610:73 19984:81 +651:95 712:111 732:45 985:101 1040:33 1052:31 1157:96 1188:92 1565:62 1788:62 1968:127 1981:50 2062:55 2528:101 2621:33 2878:190 2894:48 2934:219 2990:88 3084:128 3308:49 3352:78 3412:148 3578:16 3767:98 4153:522 4200:69 4232:102 4305:173 4526:63 4707:54 4807:60 5232:9 5877:114 6306:28 6525:50 6866:238 7334:106 7363:49 7494:190 7709:151 7850:59 7881:81 8161:109 8215:58 8393:3 8507:48 8737:61 8789:105 8799:114 8897:89 9596:97 9661:82 9936:104 9987:46 10210:139 10238:80 10276:41 10443:48 10457:105 10532:189 11140:162 11463:93 11501:87 12170:46 12173:77 12238:87 12305:131 12342:74 12365:48 12540:74 12562:70 12732:121 12916:52 13006:50 13288:123 14158:121 14190:88 14263:31 14483:87 14749:70 14880:103 15722:51 16029:67 16591:61 16651:484 16682:61 16912:69 16929:34 17519:97 17621:60 17782:84 17899:90 18189:103 18380:82 18676:29 18697:117 19210:97 19229:111 19265:365 19269:60 19278:42 19386:117 19420:57 19496:68 19809:61 19921:43 +247:88 732:46 869:57 981:35 1052:31 1070:63 1279:45 1481:41 1715:29 2163:140 2173:55 2185:40 2209:148 2339:28 2690:35 2820:131 2894:24 2934:122 3150:40 3578:29 3849:89 3860:64 4011:73 4259:42 4308:106 4387:52 4472:82 4493:100 4576:31 4723:28 5081:30 5128:221 5220:168 5361:29 5367:46 5487:103 5990:90 6060:54 6212:165 6234:50 6374:25 6633:81 6837:35 6866:478 7089:46 7210:42 7296:66 7363:25 7709:196 7896:143 7908:71 7980:58 8318:83 8548:42 8755:61 8789:106 8913:33 9044:67 9334:63 9351:53 9425:79 9537:155 10001:52 10470:43 10585:127 10925:34 10936:153 10998:36 11131:79 11224:60 11245:107 11358:56 11501:42 11871:232 12310:88 12599:21 12610:71 12916:14 12935:25 13006:25 13143:129 13384:78 13498:46 13685:30 13881:129 14067:26 14161:24 14167:62 14947:109 14969:57 14988:114 15140:74 15204:76 15294:27 16068:32 
16464:34 16581:74 16584:25 16611:83 16995:32 17069:79 17173:23 17272:34 17297:46 17882:85 17904:41 17973:75 18018:281 18082:133 18191:46 18364:18 18399:72 18768:142 18986:44 19070:130 19095:33 19109:128 19117:76 19291:65 19327:23 19386:104 19431:45 19634:45 19694:761 19774:58 19873:38 19925:57 19935:39 19947:58 +422:135 585:65 1040:83 1057:126 1715:49 1968:108 2543:119 2709:80 2934:148 3578:21 3664:66 3713:65 3860:54 3932:185 3949:76 4412:67 4617:75 4707:46 4751:72 5128:41 5328:106 5379:45 5417:113 5717:106 5775:80 6073:99 6172:77 6180:40 6229:104 6234:144 6443:124 6511:93 6554:271 6654:70 6866:269 6927:109 7438:54 7494:93 7709:46 7739:84 7753:23 7760:185 7850:50 7877:84 8078:31 8087:135 8703:52 8722:83 8786:184 8821:101 8943:51 8993:285 9033:76 9081:126 9596:124 9614:53 9711:109 9870:84 9987:39 10161:85 10350:81 10457:18 10532:217 10663:160 10789:95 10795:158 11106:90 11399:72 11400:16 11721:147 11822:81 11952:99 11953:570 12136:62 12161:61 12355:86 12448:32 12466:73 12529:111 12796:88 12798:78 12916:21 12935:42 13407:64 13453:80 13485:60 13517:115 13529:26 13685:51 13881:110 13929:24 14440:147 14697:48 15135:77 15294:45 15772:57 15777:179 16568:97 16713:152 16997:49 17045:60 17272:57 17374:194 17923:85 18050:44 18115:136 19255:178 19290:110 19386:50 19571:94 19758:49 19774:48 19881:139 19955:79 +203:59 732:37 989:104 1052:25 1110:46 1166:62 1312:133 2468:61 2789:60 2825:201 2990:73 3578:20 3643:86 3778:67 3882:50 3979:86 4472:67 4512:141 4751:70 5354:111 5874:57 5925:27 6351:12 6866:194 7055:36 7271:62 7448:26 7494:161 7640:67 7700:284 7709:73 8202:554 8333:53 8488:90 8511:21 8632:165 8790:143 8817:57 10238:66 10457:34 10532:379 10559:446 10574:154 10731:114 10936:62 10999:149 11596:129 11716:315 11822:39 11871:242 11964:69 12238:137 12529:31 12599:68 12829:89 12916:9 12985:77 13337:64 13746:294 13964:49 14161:19 14456:95 14653:76 15477:118 16034:126 17173:74 18084:87 19109:123 19174:31 19210:119 19293:89 19812:76 19926:43 +247:53 341:126 493:21 833:113 1040:15 1114:124 1273:149 1685:115 2042:106 2528:101 2660:57 2690:63 2743:72 2847:50 3052:87 3578:30 3988:66 4308:129 4472:75 4642:141 4675:31 4764:53 5128:179 5323:85 5502:78 5937:54 6234:142 6255:477 6859:70 6866:191 6917:61 6972:109 7156:79 7363:45 7379:136 7483:71 7709:179 7915:105 8459:65 8548:39 8804:153 9109:154 9325:149 10030:79 10035:135 10241:86 10532:72 10799:100 10848:68 11322:58 11328:56 11539:151 11573:66 11674:195 11877:120 12222:99 12238:23 12433:692 12448:35 12535:100 12595:84 12610:97 12916:15 12968:146 13161:126 13388:102 13685:55 14161:22 14167:57 14328:145 14421:44 15294:48 15774:125 15953:165 16646:22 17039:72 18115:76 18364:16 18372:162 18464:98 18792:86 18921:87 19144:93 19278:39 19455:66 19774:158 19904:71 19926:60 +8:38 90:59 865:112 944:18 1040:66 1610:103 1990:60 2172:60 2227:116 2302:132 2528:17 2638:113 2751:26 2851:77 2894:48 3206:31 3266:8 3314:272 3383:157 3578:16 3887:65 3908:719 4637:114 4707:54 4793:124 5361:170 5760:126 5877:57 6373:144 6577:136 6642:162 6711:97 6866:281 7182:431 7494:128 7548:21 7739:50 7947:77 8043:12 8347:27 8381:63 8759:171 8789:216 9056:122 9362:149 9837:158 9905:125 9923:68 9987:46 10127:85 10161:108 10260:109 10394:26 10457:63 10532:39 10636:16 10848:27 10870:18 11296:125 11588:71 12406:65 12604:75 12916:25 13982:109 14113:98 14161:47 14258:75 14556:50 14575:158 14735:36 14749:71 14880:52 16436:35 16968:96 17545:135 17621:61 17933:106 18110:56 19625:6 19769:85 19787:69 19894:125 +101:91 227:80 520:66 1479:89 1522:114 1715:325 2028:400 2751:85 2777:510 2872:95 3051:58 3361:130 
3578:15 3643:75 3713:143 4494:166 4820:130 5056:262 5128:90 5304:100 5305:108 5690:119 5830:28 6027:193 6234:162 6866:653 7129:103 7494:109 7709:161 7957:140 8349:160 8556:102 8799:35 8925:129 9325:51 9594:3 9596:91 9923:126 10848:165 11851:59 11971:151 11997:97 12238:153 12749:180 13006:188 13143:120 13529:54 13829:70 13929:52 14429:129 15523:90 15785:82 15922:350 16488:120 16584:183 16662:63 16807:129 17270:153 18734:153 18894:137 +447:67 733:130 1040:12 1052:22 1094:112 1097:84 1100:87 2528:124 2792:18 2894:35 3331:425 3578:12 3876:109 4554:43 4614:64 4820:111 4831:95 4908:86 5057:105 5574:92 5821:92 6133:86 6234:214 6274:45 6306:60 6365:89 6866:335 6917:48 7287:76 7464:119 7494:9 7709:232 7781:88 7872:62 8431:49 8433:124 8590:656 8789:119 8913:96 8928:107 9180:37 9325:145 9432:63 9497:155 9601:100 9834:78 10377:40 10441:120 10448:104 10532:230 10773:55 11106:16 11400:53 11668:91 11822:35 11871:105 12238:131 12582:51 12842:75 12916:14 12968:108 13006:37 13438:130 13516:70 13828:143 13929:20 14082:72 14606:62 14893:64 15294:38 15627:124 15798:45 15813:86 15853:112 16132:41 16432:46 16452:102 17595:451 18364:13 18586:83 18737:82 18800:71 19109:75 +12:19 101:49 499:607 981:70 1636:527 1691:58 1951:60 2335:95 2784:76 2934:136 2990:89 3087:100 3307:155 3447:78 3496:349 3507:71 3515:50 3578:33 3858:220 3864:124 3948:122 4102:85 4491:91 4614:44 5049:112 5253:77 5992:109 6504:93 6549:72 6865:60 6866:405 7363:98 7496:135 7709:320 8211:78 9043:70 9368:55 9585:44 10532:338 11400:19 11668:126 11685:184 11871:63 12038:95 12313:58 12380:184 12406:81 12720:92 13645:87 13929:56 14571:40 14851:48 14930:65 15231:72 15492:51 15774:106 15881:116 16464:134 16486:105 16569:4 16584:99 16657:89 16817:94 16851:146 17069:89 17102:43 17243:27 17431:72 18493:127 18623:98 19109:38 19238:90 19386:30 19926:97 +101:55 112:93 873:74 944:117 1052:69 1167:57 1168:194 1466:16 1659:104 2240:95 2528:142 2746:106 2751:1 2934:176 2944:101 3086:53 3308:55 3433:50 3518:53 3578:45 4010:423 4205:110 4473:65 4493:47 4554:67 4568:91 4576:139 4579:22 4614:50 4686:69 4818:72 4826:85 5713:110 5815:94 5946:88 6315:85 6866:317 6955:179 6973:74 7087:674 7449:182 7494:58 7709:102 8333:72 8475:118 8548:47 8666:92 8710:134 8717:78 8789:51 8816:59 9116:109 9891:114 10275:105 10276:46 10333:133 10393:134 10532:204 10767:109 10773:84 10779:260 10848:151 11004:85 11069:75 11822:53 12319:56 12463:38 12610:39 12916:16 13547:121 13632:87 14161:53 14222:72 14258:47 14851:53 15144:31 15995:79 16084:104 16810:72 17182:31 17254:201 17365:112 17701:66 17935:103 18160:71 18224:105 18364:20 18397:58 18492:90 19055:99 19386:33 19660:129 +90:162 247:92 841:102 944:40 1004:60 1052:49 1462:119 1951:47 1957:69 2403:79 2634:75 2934:34 2990:138 3578:19 3647:75 3713:61 3915:67 3932:43 4576:49 4758:121 5783:57 6095:102 6234:227 6406:71 6449:52 6866:309 7185:142 7596:56 7709:74 7748:103 8548:33 9317:37 10241:91 10292:74 10470:67 10532:318 11187:100 11218:176 11263:102 11484:142 11488:425 11871:70 11881:78 12178:61 12238:99 12610:55 12704:124 12916:8 13298:53 13685:94 13874:87 13881:149 13964:47 14082:159 14161:56 14361:81 14792:78 14851:37 14880:78 14938:148 15079:62 15151:69 15294:42 15492:40 15549:147 15710:83 17104:40 17316:804 17523:59 18100:67 18115:100 18160:50 18339:142 18508:142 18741:107 18768:55 18794:112 18894:58 19109:87 19386:23 19612:60 19889:55 19926:51 +340:64 520:108 626:243 812:64 1524:75 2560:99 2914:53 3537:91 5178:267 5203:691 5246:165 5504:106 5783:205 6234:285 6866:262 7004:87 7176:67 7477:100 7709:161 8081:71 8804:69 8821:33 9140:68 
9565:143 9594:29 10244:381 10394:19 10443:192 11106:74 11530:115 11533:76 11994:277 12916:85 13384:125 13529:94 13678:80 13905:155 13929:102 14239:167 15481:81 15740:44 16029:12 16478:139 16974:185 17337:34 17487:151 17863:61 18190:152 18295:76 18470:33 18545:137 19109:126 19685:257 +268:114 841:69 1057:108 1298:123 2037:74 2062:73 2482:52 2593:96 2934:133 2990:132 3002:43 3021:127 3578:21 4494:56 4573:113 4637:224 5197:134 5523:108 5777:119 5877:151 5956:58 6149:76 6527:181 6636:442 6866:421 7129:57 7494:103 7709:32 7853:74 8164:139 8542:475 8804:118 9043:91 9056:240 9127:171 9279:84 9719:86 10203:37 10241:63 10428:78 10457:55 10532:151 11011:295 11070:23 11695:77 11704:35 12420:113 12781:157 12895:172 12916:69 13080:55 13234:114 14158:160 14161:155 14194:96 14734:75 14938:67 15080:123 15372:76 15374:206 15411:50 15812:106 16327:51 16810:69 17069:178 17079:95 17173:60 18438:93 18893:51 18964:59 19151:157 19255:89 19278:166 19411:62 19894:104 +626:97 1259:143 1522:180 1889:65 1914:72 2007:54 2660:66 2990:123 3314:267 3505:45 3614:143 4201:48 4494:11 4723:162 4984:65 5163:65 5240:119 5315:48 5449:35 5464:36 5487:106 6080:94 6234:276 6260:101 6285:79 6372:96 6866:290 7494:88 7709:322 8003:32 8267:145 8608:92 8789:42 8804:57 9325:92 9460:69 9596:287 9792:154 9923:100 10027:18 10244:305 10381:126 10457:93 10516:88 10532:134 10635:84 10799:91 10965:56 11330:86 11400:51 11971:120 12170:403 12238:102 12419:54 12772:131 12916:93 12968:65 13165:174 13224:5 13881:129 14218:35 14239:67 14369:75 14665:56 15362:494 15915:120 16019:106 16223:70 16336:56 16427:64 16810:85 16952:140 16997:85 17083:185 17270:134 17487:122 18364:206 19255:87 19420:89 19453:8 19473:84 19685:206 19707:60 19787:214 +80:312 879:49 1293:126 1472:84 1879:92 2664:142 2990:177 3012:69 3122:70 3397:572 3610:63 3614:25 4427:92 5220:80 5368:527 5385:55 5532:173 5946:22 6234:161 6285:155 6347:146 6559:94 6652:45 6696:49 6815:74 6866:244 6933:93 7494:162 7709:45 7999:3 8801:30 9050:70 9417:123 9594:147 9982:143 10035:116 10161:105 10200:331 10241:80 10532:178 11253:55 11400:44 11898:102 12191:71 12206:42 12238:78 12599:95 12610:156 12655:75 12916:28 13351:93 13881:37 14729:92 15294:234 15523:121 16034:139 16593:102 17770:92 18261:67 18894:326 19109:30 19712:129 19787:94 +295:132 626:200 877:92 1312:98 1674:85 1914:74 2309:139 2355:90 2522:121 2619:65 2934:80 2990:59 3307:166 3308:74 3361:204 4587:94 4703:103 4723:83 4906:98 5074:212 5206:129 6060:248 6234:49 6866:566 6973:69 7360:58 7494:124 7709:168 8085:27 8433:27 8544:96 8986:213 9245:69 9258:50 9406:80 9596:441 9806:93 10241:66 10244:313 10443:44 10457:32 10626:114 10799:69 11006:91 11685:30 11871:191 11950:66 11994:114 12170:138 12406:147 12916:89 13165:178 13881:109 13929:42 14239:69 14662:484 14665:58 14682:139 14874:51 15378:51 15980:126 16034:149 16224:135 16519:62 16821:68 16974:152 16997:87 17003:108 17374:91 17487:125 17895:107 18007:175 18011:149 18364:79 18397:119 18586:107 18744:145 19169:71 19473:173 19685:264 19890:138 +626:242 970:83 1052:113 1057:30 1316:91 1789:41 1855:91 2000:657 2117:68 2726:146 2751:115 2934:127 3020:84 3283:117 3307:82 3356:31 4215:76 4443:20 4675:96 5696:41 6234:90 6866:252 7395:12 7407:239 7494:205 7682:54 7709:105 7739:182 7896:55 8098:120 8162:7 8379:90 8789:58 8804:82 9180:186 9415:92 9734:55 10035:55 10163:34 10172:74 10244:127 10276:303 10532:178 10833:261 10848:61 11070:43 11106:63 11245:56 11400:48 11570:21 11871:125 11956:56 12007:158 12406:67 12732:222 12916:62 12939:59 13032:92 14175:80 14556:181 14743:139 14856:13 15103:61 15438:37 
18499:328 19182:105 19592:51 +476:40 520:34 529:46 994:83 1040:12 1057:7 1298:34 1578:50 1718:114 1951:22 2182:69 2324:36 2493:37 2630:78 2990:82 3578:18 3586:31 3932:197 3988:26 4097:20 4308:25 4383:31 4495:37 4684:36 4757:141 4995:95 5041:62 5487:118 5679:117 5842:86 5871:45 6095:281 6168:26 6172:33 6372:106 6676:43 6792:80 6866:444 6955:58 7226:104 7494:12 7709:129 7808:59 7840:51 7844:79 8228:129 8297:87 8320:95 8362:30 8379:84 8488:133 8703:22 8804:164 8885:15 9109:84 9167:59 9800:113 9923:25 10187:58 10238:29 10241:123 10356:59 10443:73 10532:140 10620:22 10848:78 11057:63 11168:25 11224:87 11401:32 11446:40 11603:233 11822:69 11871:73 11937:65 12113:607 12136:26 12198:42 12320:168 12610:25 12699:30 12769:165 12916:14 12933:93 12951:104 12968:41 13079:106 13283:95 13583:63 13881:28 14040:73 14161:77 14313:69 14384:62 14792:107 14851:102 15058:108 15205:147 15374:28 15484:184 15573:167 15798:22 15966:41 16068:13 16223:32 16464:24 16602:369 16717:38 17105:32 17196:33 17198:105 17523:244 17564:40 17702:96 18160:138 18206:259 18364:13 18737:108 18916:93 18943:74 19041:28 19095:24 19210:35 19386:43 19427:58 19758:63 19809:22 19834:28 +76:99 388:97 657:57 1004:108 1743:59 1889:112 1987:91 2194:53 2240:132 2896:93 2910:20 2990:23 3045:80 3160:97 3307:80 3578:19 3713:60 3988:56 4308:55 4341:119 4427:87 4430:83 4503:102 4576:48 4614:35 4654:93 4682:78 5383:78 5449:42 5821:83 6132:78 6221:132 6234:44 6372:141 6374:38 6507:90 6705:112 6771:176 6866:152 7709:125 7729:47 7837:97 7845:75 7848:28 8078:28 8099:44 8152:79 8475:76 8548:66 8615:149 8666:64 8759:105 9792:113 10532:192 10626:100 10735:36 11303:73 11328:47 11518:47 11668:49 11822:37 11871:138 12206:85 12238:20 12406:95 12448:89 12529:118 12591:589 12610:55 12916:8 13006:40 13529:112 13570:585 13631:98 13632:61 13685:47 13907:87 13929:43 14161:18 14404:41 14851:37 14947:148 15204:19 15294:41 15386:135 15492:118 15674:21 15777:119 15811:74 16371:81 16419:163 16524:61 16810:155 18017:91 18364:14 18372:29 18397:119 18800:80 18894:115 19024:130 19255:118 19261:77 19386:46 19455:56 19564:32 19659:86 19745:78 19787:150 +80:182 221:81 303:52 388:161 643:122 953:64 1633:50 1795:638 1889:42 2123:78 2296:58 2394:57 2934:45 2990:95 3045:67 3460:39 3578:11 3870:46 4007:24 4328:45 4344:58 4495:67 5588:56 5946:79 6234:151 6377:91 6449:85 6577:41 6859:50 6866:98 7494:167 7709:74 7836:60 7957:61 8099:36 8337:65 8537:124 8548:27 8615:247 8759:72 8799:70 8804:152 8821:50 9324:58 9446:29 10483:88 10532:224 10741:198 10799:93 10829:62 11224:39 11328:39 11372:56 11685:100 11871:100 12320:155 12448:25 12604:56 12610:137 12916:15 12968:78 13435:58 13881:128 13929:18 14161:46 15294:68 15558:545 15854:103 16382:50 16515:59 16810:91 17343:115 17547:62 17562:75 17932:8 18160:41 18364:23 18800:62 18809:74 18943:87 18964:119 19309:150 19327:149 19386:19 19774:37 19880:80 +744:354 1538:512 1889:68 2325:94 2384:91 2478:83 2491:97 2528:51 2591:37 2990:52 3614:101 3654:52 4063:107 4097:74 4132:336 4308:219 4646:116 5426:68 5607:149 5609:140 5790:35 5937:186 6866:177 7129:119 7494:185 7709:224 8151:121 8217:56 8433:102 8789:45 8799:104 9006:116 9180:159 9713:23 9872:107 9971:77 10241:17 10443:106 10532:131 11321:540 11333:358 11871:110 12674:122 12916:13 13079:101 13324:121 13724:80 14161:74 14177:104 14640:94 14729:70 14963:63 15605:146 16192:107 16223:89 17896:126 19593:84 +112:81 432:251 615:76 1290:125 1391:81 1565:360 2751:107 2894:47 3307:17 3515:144 3578:8 3684:80 3966:56 4191:318 4428:163 5379:21 5612:218 5877:56 5946:16 6234:101 6364:56 6866:424 7494:154 7564:74 
7709:272 8146:93 8201:64 8215:56 8412:167 8420:89 8804:78 8953:109 8980:281 9056:118 9349:128 9433:56 9511:124 10443:53 10508:76 10532:172 10799:84 10932:88 10999:46 11106:46 11135:74 11685:140 11695:25 11727:98 11873:357 11918:76 12007:67 12118:168 12139:11 12292:131 12599:55 12666:188 12702:91 12914:153 12916:41 12968:57 13090:131 13106:402 13143:130 13356:75 14067:49 14158:59 14161:23 14194:65 14226:121 14236:45 15115:64 15605:105 17501:104 17996:167 18169:47 18618:174 18916:54 19210:47 19255:159 19278:41 19805:36 19935:76 +1416:474 1593:82 1758:103 2402:267 2528:54 2739:85 2792:34 3307:89 3334:6 4018:19 4909:346 4914:372 6106:39 6865:182 6866:261 6984:8 7428:87 7494:181 7709:383 8055:39 8083:79 9068:77 9116:298 9800:124 9859:65 9867:529 10017:88 10043:103 10241:19 10799:60 10845:156 11245:149 11290:76 11685:100 11699:70 11871:16 11880:103 12206:105 12677:66 12918:124 12968:45 13143:79 13482:125 13511:168 13870:117 13877:138 13881:160 14384:99 14385:161 14844:206 15092:52 15523:117 15597:148 15844:46 16488:183 16682:20 16810:86 16919:137 17071:136 18115:119 19120:106 19132:19 19255:58 19996:124 +626:367 1057:98 1699:81 1914:91 2416:76 2522:149 2529:80 2660:116 2934:160 2990:112 3032:127 3976:89 4612:113 5379:110 5473:76 5937:15 6027:43 6234:128 6368:123 6372:80 6406:20 6642:136 6796:56 6866:431 7055:81 7129:137 7206:87 7494:132 7682:40 7709:220 8078:67 8176:93 8461:70 8789:58 10161:90 10244:384 10443:154 10848:85 11007:85 11068:5 11164:156 11216:141 11400:127 11822:88 11871:200 11898:115 12004:102 12007:89 12238:51 12320:115 12406:139 12540:43 12582:26 12610:259 12632:119 12916:117 12928:126 12968:48 12992:141 13012:23 13101:48 13165:109 13285:160 13546:66 13881:125 14081:81 14239:84 14638:79 14662:593 14665:71 14753:29 15045:70 15605:36 15777:140 15980:155 16748:56 17487:153 17621:112 17770:118 18364:194 18372:53 18454:91 19144:99 19446:70 19685:259 19894:55 19951:76 +56:110 626:274 1040:68 1057:42 1177:131 1633:160 1914:101 1968:176 1970:43 2309:190 2528:7 2751:78 2822:42 2957:64 2990:57 3176:66 4102:114 4190:122 4427:64 4637:117 4683:113 4820:56 6304:66 6372:62 6866:117 7390:33 7661:86 7709:238 8540:95 8645:87 8804:104 8871:86 8986:146 9203:128 9325:139 9379:165 9615:163 10161:82 10241:116 10244:573 10471:30 10784:58 10848:120 11185:274 11695:134 11871:248 11994:156 12610:73 12916:157 13929:57 14019:161 14239:94 14399:163 14460:70 14642:18 14665:79 14947:21 15293:105 15812:166 15948:291 16682:56 16851:141 16888:46 16908:23 17487:342 17499:136 17712:108 18199:50 18372:143 18438:61 18543:58 18779:87 19024:174 19165:85 19255:107 19323:62 19685:218 19787:110 +285:540 312:95 1052:186 1638:53 1889:101 2990:153 3539:48 4011:125 4181:29 5304:98 5946:145 5956:21 6229:56 6866:67 6991:53 7494:206 7709:401 8151:162 8312:158 8788:122 8804:173 9068:105 9143:108 9325:87 9363:101 10244:208 10488:71 10848:214 11245:87 11400:91 11984:14 11994:455 12355:94 12406:97 12448:228 12798:105 12916:51 13829:92 13982:134 14239:274 14413:126 14627:110 14947:121 15230:149 15258:52 16810:55 17230:65 17487:249 18364:211 18885:100 19255:112 19455:431 19707:75 19787:114 +343:377 520:81 662:162 702:126 983:35 1040:113 1553:133 1981:61 2025:83 2091:76 2185:116 2243:81 2528:62 2990:86 3578:28 4270:70 4857:554 5132:98 5163:127 5507:227 5699:51 5783:86 6060:78 6084:264 6171:72 6234:108 6866:785 6948:126 6994:100 7709:317 7769:162 8078:125 8312:30 8556:90 8786:249 8789:56 8804:111 9800:50 9960:40 10019:119 10953:137 11871:192 12238:82 12319:172 12916:14 12968:155 13055:77 13727:111 13881:136 14158:208 14820:299 14878:397 
14947:69 14967:107 15009:68 15438:120 15444:112 15448:65 15867:196 16810:174 18372:37 18599:117 18714:33 19109:27 19174:132 19255:122 +101:75 321:92 1052:47 1527:76 1538:252 2617:106 2872:85 2934:141 2990:107 3413:265 3578:12 3827:103 3846:120 3928:120 3988:110 4011:24 4402:110 4744:78 4889:703 5041:88 5119:114 5207:112 5305:89 5379:119 6158:137 6234:86 6372:132 6593:27 6866:37 7011:29 7690:10 7709:150 7896:4 8148:209 8349:132 8420:125 8433:164 8767:37 8799:71 8925:167 9004:103 9068:78 9554:93 9565:27 9616:265 10443:118 10925:104 11064:123 11518:185 11642:21 11668:96 12191:59 12238:120 12463:149 12798:49 12916:3 13114:280 13384:85 13685:92 13829:68 13929:43 14167:95 14310:68 14373:60 15294:81 15747:37 15798:94 16225:152 16515:119 16682:90 17279:88 17773:105 17896:164 18115:36 18916:94 18973:99 18975:135 18991:83 19024:108 19062:70 19162:139 19921:66 +953:11 1268:87 1565:296 1820:98 2528:138 2607:640 2874:69 2990:68 3433:105 3445:49 3499:94 3578:19 3598:54 4344:118 5125:73 5487:45 5945:153 6125:26 6801:88 6859:183 6866:250 6973:18 7091:31 7182:205 7494:40 7709:417 8224:100 8759:146 8799:78 8917:77 9614:298 10276:99 10461:37 10532:95 10914:56 11069:58 11871:16 12206:86 12616:60 12781:108 12798:136 12916:20 12932:43 12968:117 13378:98 14638:90 14643:140 14800:224 15118:68 15294:126 15390:52 15411:102 15780:89 16149:156 16239:12 16532:270 16566:122 16773:380 16805:127 16810:161 18352:68 18676:163 18752:23 19255:38 19926:73 +517:110 626:219 714:156 732:150 1040:109 1057:95 1699:146 2000:595 2610:106 2934:156 3334:125 3361:97 3916:47 4595:40 4820:83 5439:137 5487:95 5594:108 6234:134 6387:117 6866:439 7709:85 8342:125 8645:66 8789:192 9307:96 9508:46 9511:115 9563:44 10035:154 10119:34 10161:159 10179:64 10244:344 10799:62 10848:35 11069:109 11871:271 11994:251 12320:138 12903:445 12916:63 13529:139 13929:92 14111:128 14179:135 14239:151 14918:122 15423:33 15674:41 15887:134 16682:39 16770:78 16810:175 16974:167 17487:137 17948:164 18397:74 18773:89 19139:293 19685:116 19787:26 +151:115 225:48 626:201 1040:200 1052:47 1824:167 1914:75 2591:57 2934:125 4152:130 4499:70 4723:168 4820:51 5130:143 5550:117 6313:83 6866:238 7129:28 7494:302 7632:86 7709:322 8342:46 8774:132 9126:293 9966:135 10043:102 10244:316 10457:32 10532:50 10799:79 11181:63 11186:16 11330:89 11487:127 11657:41 11871:396 12170:696 12238:136 12916:90 13570:142 13829:56 13881:84 14236:23 14239:69 14665:58 14682:36 15355:140 15777:59 16545:25 16797:40 16810:99 16882:76 16974:153 17487:126 18287:226 18364:80 18568:44 18893:59 19109:134 19255:93 19685:107 19760:171 +87:198 96:228 303:170 489:14 548:60 958:44 1040:12 1070:45 1244:38 1316:52 1615:63 1801:79 1889:113 1990:43 1993:172 2311:150 2395:73 2820:139 3007:202 3093:148 3433:221 3443:151 3504:135 3515:35 3578:6 3617:183 3882:43 3988:103 4200:49 4383:122 4723:39 4757:105 5057:69 5162:32 5204:156 5347:68 5446:2 5449:9 5874:50 5946:22 6067:52 6234:229 6456:63 6465:124 6549:51 6866:242 6876:78 7090:85 7210:120 7406:155 7494:199 7709:343 7748:54 7908:99 7938:59 8024:82 8146:132 8196:80 8301:192 8504:203 8804:77 8817:49 8838:94 8908:50 8939:106 8943:43 9109:97 9276:131 9526:60 9614:45 9819:131 9906:70 9948:121 10035:159 10187:95 10218:315 10241:53 10276:59 10350:68 10377:156 10530:52 10532:225 10569:75 10735:66 10799:68 10848:248 10872:55 11011:160 11342:72 11400:83 11698:156 11853:152 11871:59 11898:100 11958:62 12238:84 12319:36 12781:63 12798:145 12805:45 12916:9 13730:92 13744:85 13881:47 14082:15 14119:123 14375:84 14455:128 14556:35 14612:76 14793:82 14947:59 15292:55 15303:43 
16089:128 16115:99 16223:69 16478:111 17013:95 17045:51 17077:132 17134:80 17249:141 17711:49 17854:106 18364:13 18413:60 18438:119 18499:64 18597:60 18622:114 18769:124 18908:103 18943:83 19278:60 19545:215 19634:64 19712:65 19739:46 19873:161 +608:95 933:46 1268:132 1287:127 1774:112 2025:115 2237:69 2419:163 2934:14 3361:70 4259:263 5326:74 5426:120 5646:63 6866:276 7287:69 7491:447 7494:27 7686:102 7709:117 7723:331 7775:58 10187:112 10443:52 10476:127 10532:374 10637:98 10735:146 10799:101 11201:75 12781:79 12968:119 13161:111 13264:125 14376:74 14836:85 14880:115 14931:108 15206:90 15389:377 15476:82 15868:331 16576:96 17002:126 17471:215 17613:549 17785:91 18347:142 19248:56 +101:136 476:313 1040:46 1052:86 1889:73 1968:119 2025:94 2407:109 2990:103 3361:83 3377:14 3578:45 3661:60 3664:438 3820:52 3946:75 4698:295 4707:152 4790:55 4986:69 5382:434 5449:97 5458:79 6234:100 6274:172 6792:140 6866:436 7494:151 7709:113 8331:48 8799:20 9109:106 9683:82 9792:86 10035:129 10532:146 11106:113 11727:86 11871:221 12278:427 12406:129 12701:141 12877:138 12896:43 12916:6 12923:109 13384:107 13685:167 14750:122 14800:262 14883:89 15172:66 15215:63 15294:148 15535:150 15610:102 15798:172 16034:49 16227:72 16478:71 16810:23 17792:70 18218:106 18972:21 19049:82 19386:82 19460:79 19523:95 19726:82 +104:713 194:132 833:172 953:153 957:157 985:158 1040:49 1070:62 1483:222 1810:40 1970:48 2384:81 2419:100 2746:94 2805:79 2990:58 2999:61 3086:47 3578:16 3614:112 3712:117 3906:104 3932:54 5298:119 5390:146 5417:84 5660:112 5836:79 5877:114 5951:53 6303:143 6306:53 6319:99 6866:278 6955:159 7080:149 7494:140 7755:121 8215:58 8789:198 8804:82 9325:88 9460:97 9485:111 9596:97 9907:67 9987:91 10161:115 10241:168 10457:42 10532:80 10799:19 10999:118 11006:60 11106:32 11186:46 11261:27 11619:121 11781:62 11822:47 11871:93 11882:111 12108:117 12170:136 12191:59 12448:38 12466:86 12916:23 12966:44 12973:33 13210:29 13881:37 13929:28 14161:23 14358:77 14458:113 14749:70 14947:163 15664:8 15931:91 16325:66 16773:79 16826:109 16868:56 17523:75 17784:72 18160:63 18169:103 18364:70 18467:114 18540:313 18656:87 18750:292 18922:116 19172:70 19255:128 19272:91 19386:29 19932:106 19984:119 +346:106 1814:364 2572:65 2781:73 2934:75 3062:261 3084:52 3247:131 3457:134 3578:9 3876:101 3976:49 4281:173 4352:39 4699:63 4701:117 4826:88 4848:160 5057:56 5434:347 5831:47 5946:132 6666:53 6866:244 6868:128 7006:111 7287:51 7494:238 7542:435 7709:314 8081:25 8374:98 8488:164 8511:50 8515:58 9887:111 10161:4 10532:74 10848:182 11007:16 11342:58 11685:56 11704:77 11871:227 11878:121 12196:102 12238:43 12502:85 12674:42 12720:106 12760:163 12884:111 12916:2 13608:439 13881:74 13939:82 14015:85 14047:16 14369:37 14606:99 14682:79 15204:64 15448:119 15594:122 15853:178 16034:59 16223:60 16855:179 17300:179 17871:94 18115:8 18535:146 19109:115 19162:73 19421:138 19707:122 19787:72 19873:173 19928:35 19950:169 +58:74 280:102 492:76 773:74 821:106 865:90 1040:28 1057:34 1801:191 1889:92 2078:185 2630:124 2915:404 2923:211 3345:179 3460:122 3578:28 3882:105 4207:137 4473:116 4750:62 4864:151 5119:160 5633:363 5877:99 5946:43 6027:87 6347:127 6866:117 6982:84 7055:76 7360:33 7494:177 7560:129 7638:118 7709:124 8074:473 8801:133 9480:89 9711:39 10115:91 10241:119 10532:197 10718:59 10799:77 11106:73 11206:113 11514:235 11745:91 12057:98 12161:125 12238:169 12380:62 12529:60 12610:60 12916:7 12968:77 13685:206 13881:63 13929:96 14044:15 14305:88 14376:163 14766:21 14997:165 15204:270 15355:94 15411:95 16034:56 16269:78 16724:157 16761:199 
16851:104 17357:190 17508:36 17806:104 17961:106 18011:65 18110:109 18364:60 18513:46 18764:135 18921:70 19174:66 19327:237 19386:51 19525:95 19622:24 19634:155 19707:81 +110:101 268:21 732:43 763:159 879:131 1828:61 1853:89 1889:163 2339:26 2471:43 2630:340 2840:110 2894:23 2918:92 2990:58 3260:246 3433:63 3515:71 3578:4 3716:43 4294:58 4517:85 4521:39 4573:76 4908:77 5229:115 5378:128 5665:50 5941:88 6194:69 6234:68 6306:124 6374:23 6866:382 7182:41 7379:9 7465:109 7494:34 7709:175 7771:75 7893:67 8203:41 8262:93 8333:31 8504:45 8515:81 9166:152 9424:60 9430:66 9475:133 9913:71 10048:84 10241:70 10276:39 10301:290 10404:59 10532:294 10628:21 10789:53 10848:36 11160:73 11175:27 11263:109 11383:116 11685:139 11745:130 11994:36 12087:135 12093:63 12104:44 12290:61 12379:36 12532:91 12783:119 12796:53 12916:32 13026:112 13050:46 13051:62 13864:169 13881:81 13971:70 14161:112 14245:97 14473:80 14594:89 14606:41 14733:32 14735:98 14784:57 15156:96 15414:43 15424:109 15583:258 15780:18 15785:101 15793:58 15876:178 16075:91 16104:64 16223:153 16432:30 16866:75 16876:67 16881:124 16950:60 17149:76 17173:21 17505:36 17761:116 18077:100 18694:41 18820:68 19041:148 19327:195 19604:382 19638:344 19645:47 19756:139 19809:29 19926:44 +626:338 664:458 1040:84 1057:64 1316:175 1681:115 1902:67 1914:125 1968:108 2194:174 2480:102 2847:13 2990:120 3630:38 3728:82 4427:88 5946:105 6815:94 6866:593 7709:103 9109:44 9157:181 9569:75 10161:118 10244:353 10276:106 10489:268 10508:51 10532:69 10799:108 11164:111 11303:66 11470:328 11871:158 11898:103 11994:193 12238:62 12610:90 12916:91 12968:67 13881:3 13929:71 14138:261 14161:60 14239:116 14543:75 14665:98 15346:89 15452:121 15630:67 15888:87 15927:21 16646:63 16797:44 17166:102 17487:211 17930:78 18688:175 18944:47 19189:121 19271:49 19676:78 19685:179 19737:93 19769:129 +255:29 572:67 967:60 1040:18 1057:57 1251:181 1356:113 1533:121 1824:118 2290:71 2934:173 2945:83 3558:123 3578:17 3614:115 3860:68 3976:97 4459:97 4492:181 4637:61 4750:28 5361:61 5507:71 5551:66 5779:40 5961:55 6153:228 6234:116 6306:49 6374:104 6672:87 6866:523 7055:47 7494:168 7596:383 7709:147 7742:101 8031:128 8078:313 8298:78 8303:137 8437:115 8488:68 8569:120 8842:72 9056:131 9109:29 9424:68 9771:182 10268:141 10357:33 10383:85 10532:82 10799:168 10872:83 10936:81 11085:69 11224:64 11322:68 11328:65 11409:75 11625:110 11663:24 11871:147 12188:353 12206:26 12448:41 12517:106 12529:43 12554:298 12781:58 12916:47 13229:84 13384:29 13402:115 13687:135 13829:153 13881:75 13925:122 13980:95 14319:145 14665:41 15295:94 15605:97 15726:92 15772:145 16019:82 17039:84 17053:120 17104:54 17173:49 17279:61 17318:82 17471:73 17824:75 18760:435 19022:57 19109:162 19174:41 19278:45 19473:62 19707:67 +4:95 571:89 634:245 865:127 1052:47 1203:367 1575:80 2180:96 2518:73 2528:104 2751:47 2901:81 2934:133 3280:71 3578:19 3988:55 3996:52 4347:621 4956:17 5244:85 5308:124 6234:96 6320:146 6615:68 6859:58 6865:45 6866:436 7709:122 7780:66 7896:191 7983:81 8064:47 8605:83 8759:78 8789:214 9221:53 9325:112 9594:49 10154:85 10241:28 10443:98 10532:186 10706:109 10754:121 10848:71 10874:94 10925:104 11079:179 11228:72 11358:84 11658:357 12238:96 12369:121 12432:128 12459:92 12488:69 12543:110 12698:108 12916:5 12959:113 12968:36 12985:213 13929:21 14167:47 14539:128 14800:71 14998:96 15213:75 15294:40 15458:47 15844:124 15957:57 16478:71 16516:131 16898:99 16913:152 17079:55 17961:29 19056:92 19174:29 19247:65 19255:134 19386:22 19468:64 19541:94 19730:113 +370:181 556:91 596:44 732:29 891:185 1040:32 1272:116 
1715:38 2015:35 2751:109 3174:44 3468:80 3515:32 3578:5 3734:684 3857:86 3864:82 3915:55 4392:366 4526:41 4597:56 4614:115 4707:35 4711:56 4868:46 5057:62 5128:32 5305:75 5587:63 5959:32 6201:56 6234:127 6372:156 6469:67 6725:104 6866:300 7199:30 7363:32 7483:50 7494:304 7596:46 7709:162 7729:39 7850:116 8195:90 8475:81 8794:58 8824:97 8875:114 8913:43 8958:85 9306:89 9763:62 9891:66 9987:30 10176:80 10370:116 10535:46 10665:65 10788:104 10799:215 11422:61 11545:49 11609:65 11685:208 11822:62 11856:49 11869:11 12191:63 12238:150 12610:45 12632:112 12653:99 12916:7 12968:63 13222:99 13384:72 13653:117 13868:61 13884:120 13929:18 14161:31 14209:104 14325:40 15115:43 15294:68 15605:101 15640:83 15798:40 15869:47 16029:95 16112:55 16232:37 16302:90 16524:51 16817:61 16835:152 17343:55 17692:38 17696:233 17711:45 17896:75 18100:110 18160:41 18263:57 18356:68 18364:34 19109:52 19174:25 19327:59 19360:116 19896:101 19921:28 +257:91 437:94 985:100 1165:64 2178:79 2634:85 2718:109 2743:39 3012:40 3084:74 3578:22 3614:78 3654:90 3713:69 4494:11 4623:91 5081:53 5139:49 5660:301 6234:221 6449:58 6729:116 6865:53 6866:305 7080:118 7494:16 7570:122 7709:33 8223:505 8425:36 8436:146 8488:61 8548:38 8789:148 8945:61 9028:111 9148:100 9444:47 9870:89 9887:30 10735:41 10899:114 10936:50 11395:546 11400:136 12144:133 12610:31 13685:107 13929:50 15206:33 15492:45 15523:57 15535:40 15853:69 16022:451 16225:89 16353:77 17270:60 17575:123 17742:65 17961:107 18215:114 19210:87 19313:90 19386:26 19787:14 19889:63 19926:78 +1052:86 1648:66 1987:162 2446:162 2823:76 2990:116 3141:240 3221:360 3578:11 4981:147 5353:645 5446:110 5448:140 5484:12 5487:120 5712:99 5897:118 6282:17 6866:418 6907:107 7494:117 7709:199 8087:63 8703:84 8789:85 8940:58 9711:133 9808:57 9921:106 10035:73 10532:164 10860:115 11018:104 11106:23 11187:102 11532:98 12161:100 12238:121 12607:80 12610:49 12916:3 12968:96 13098:375 13368:88 13483:148 13661:67 13744:108 13859:46 14184:110 14474:86 14531:90 14880:36 15798:85 16747:71 16840:325 17204:104 17519:58 17841:47 17855:19 18344:63 18364:24 18575:40 19151:155 19718:201 19873:104 19926:99 +289:95 321:131 585:31 732:37 869:46 923:45 981:29 1052:13 1097:47 1273:91 1279:73 1537:61 1715:48 1746:157 1987:24 2194:28 2209:120 2335:74 2528:75 2635:37 2820:106 2847:28 2934:60 3578:30 3882:25 3946:118 4053:52 4126:43 4275:56 4472:33 4576:25 4908:133 5128:218 5227:77 5347:39 5367:37 5646:139 5698:41 5965:48 6212:134 6234:313 6662:50 6855:12 6866:372 7210:34 7257:147 7362:46 7494:199 7592:36 7709:323 7780:35 8215:24 8245:108 8298:30 8548:34 8759:115 8804:40 8913:54 8945:28 9044:54 9239:51 9325:102 9339:52 9453:32 9671:149 9824:30 9960:115 9998:31 10001:84 10161:122 10407:45 10443:167 10471:87 10535:29 10542:77 10744:26 10757:118 10856:35 11007:116 11106:72 11181:38 11224:73 11245:81 11358:45 11548:77 11877:27 12054:8 12147:59 12161:30 12173:139 12427:100 12610:29 12728:90 12916:14 12935:41 12947:73 13258:30 13369:95 13685:73 13829:88 13964:24 14067:21 14074:43 14158:25 14161:19 14332:59 14500:73 14697:23 14851:39 15104:47 15140:60 15226:33 15294:22 15375:51 15433:29 15586:29 15674:51 15757:54 15780:15 15869:206 16084:38 16159:37 16413:131 16478:92 16581:30 16584:20 16611:67 17134:46 17297:38 17343:114 17381:80 17530:50 17876:59 17904:34 18018:399 18281:119 18364:21 18372:144 18399:59 18684:41 18745:35 18768:57 18920:21 18964:86 18986:36 19095:80 19327:19 19386:84 19634:37 19694:672 19873:31 19925:93 19935:32 19947:95 +341:72 626:193 732:132 1052:90 1169:54 1215:23 1981:87 2367:150 2560:70 2934:262 2990:128 
3056:16 3123:108 3537:183 3544:111 3712:139 3957:109 4683:106 5634:139 5760:144 6866:257 7334:6 7407:190 7604:85 7709:184 7739:144 8804:116 9053:468 9180:148 9530:20 9621:25 10241:60 10244:101 10276:121 10532:198 10746:79 10833:207 10971:111 11006:177 11400:97 11685:99 11871:121 12674:145 12781:64 12916:24 13529:155 13616:66 13881:91 14165:96 14325:167 14513:504 14556:144 14813:22 15430:55 15523:107 15535:35 16034:85 16810:6 17023:95 17884:75 18007:56 18037:229 18058:459 18397:89 18418:142 18920:81 19083:81 19143:98 19813:58 19953:130 +626:201 936:69 1040:200 1052:94 1057:117 1157:29 1483:32 1508:45 1633:235 2528:20 3314:276 3614:74 3866:101 4097:87 4710:93 5532:93 5724:10 5776:63 5783:60 6373:75 6666:17 6854:334 6866:120 6983:85 7494:135 7591:83 7709:192 8475:142 8789:75 8804:106 9006:84 10035:42 10127:154 10134:146 10241:324 10244:421 10532:214 10848:73 11085:132 11106:72 11109:42 11698:89 11871:86 11994:230 12232:103 12436:86 12704:46 12916:102 12968:76 13436:29 13657:104 13829:158 13846:51 14239:138 14835:95 15087:512 15622:123 15948:214 16092:72 16296:52 16810:176 16974:153 17487:251 19109:26 19685:107 19881:62 +315:131 797:89 996:111 1001:110 1040:109 1070:69 1691:33 1912:29 1981:53 2290:74 2643:28 2894:54 2934:111 2990:378 3174:150 3578:9 4281:57 4412:87 4427:67 4489:94 4755:75 4811:30 5335:42 5379:40 5507:147 5946:138 6060:198 6167:166 6234:100 6666:64 6859:85 6865:66 6866:576 7120:63 7287:41 7494:111 7510:50 7709:89 7739:55 8078:162 8081:75 8215:65 8312:39 8556:83 8786:643 8804:100 9070:199 9298:44 9782:106 10241:120 10532:268 10571:116 10600:510 10753:72 10932:101 11106:47 11295:65 11518:67 11671:135 11869:34 11871:114 12319:56 12882:136 12916:42 13140:162 13290:103 13351:177 13485:99 13829:42 13881:105 14158:67 14161:105 14690:159 14820:97 15017:110 15144:61 16464:74 16524:87 17329:5 17373:122 17805:109 19174:85 19255:69 19481:34 19645:109 19787:159 +101:35 160:95 203:26 303:28 512:41 580:80 587:98 596:48 985:137 1010:37 1040:12 1057:89 1070:22 1166:81 1244:115 1363:44 1981:164 2136:97 2339:20 2615:50 2746:34 2823:67 2857:36 2886:66 2892:62 2902:75 2934:69 3433:32 3578:12 3582:34 3678:69 4435:134 4614:16 4720:41 5081:21 5095:85 5253:27 5266:42 5361:20 5379:93 5551:22 5945:91 5959:35 6065:26 6067:52 6080:100 6099:32 6172:32 6240:59 6287:131 6596:48 6837:25 6866:517 7104:39 7306:109 7494:126 7709:278 7758:26 8016:116 8262:23 8755:7 8789:137 8905:31 9271:107 9275:46 9421:50 9529:95 9757:26 9856:579 9939:41 10161:90 10241:40 10245:58 10532:118 10640:30 10735:16 10799:38 10848:138 10936:39 11342:18 11514:534 11752:41 11797:51 11871:162 12007:30 12214:40 12448:27 12547:29 12834:43 12840:178 12916:4 13526:56 13881:194 13964:21 14161:8 14222:23 14404:19 14413:9 14501:24 14620:64 14697:40 14733:24 14899:14 15115:24 15178:35 15217:67 15294:19 15389:42 15390:117 15523:76 15586:25 15828:57 15844:100 16019:49 16084:33 16201:40 16464:24 16478:114 16550:269 16939:61 17272:48 17571:28 17678:104 17737:37 17771:32 17784:26 17829:130 17904:29 17912:59 17929:259 17970:19 18050:37 18323:76 18364:12 18372:97 18572:169 18688:25 18711:98 18779:87 18818:36 19089:44 19174:41 19255:85 19272:65 19282:60 19688:89 19778:34 19799:83 19973:92 +268:100 954:64 1646:7 1916:148 1951:69 1990:138 2025:48 2062:63 2661:149 2917:110 2990:78 3095:134 3296:143 3307:94 3308:57 3578:37 4113:103 4153:344 4205:113 4247:230 4460:101 4678:199 5128:112 5361:65 5417:84 6078:71 6153:61 6234:120 6315:87 6549:82 6559:119 6681:165 6889:132 7004:117 7098:171 7257:53 7271:132 7419:185 7467:22 7494:223 7527:80 7660:154 7709:307 
7850:137 8025:115 8143:109 8204:92 8262:75 8325:17 8337:115 8488:95 8703:70 8789:126 8913:152 9006:21 9497:46 9581:32 9614:287 9819:94 10161:78 10269:89 10445:140 10735:53 11400:100 12297:99 12535:126 12720:106 12916:7 13147:72 13337:178 13881:43 14067:58 14194:158 14294:91 14733:79 15697:87 16034:156 16478:85 16551:130 17083:72 17160:33 17191:39 17428:74 18053:87 18100:97 18368:185 18413:388 18676:85 18800:82 19024:131 19038:199 19786:46 19882:131 19935:90 +80:105 492:55 503:36 732:23 776:39 1052:31 1685:62 1715:29 2015:27 2317:46 2523:69 2682:88 2718:61 2766:47 2990:61 3484:65 3505:55 3506:43 3578:24 3713:38 3741:61 4039:1 4218:94 4273:45 4328:35 4512:108 4576:31 4711:43 4909:57 5081:30 5128:73 5166:45 6172:45 6234:102 6315:38 6322:61 6449:33 6752:114 6866:301 7097:48 7494:227 7560:112 7709:91 7780:86 7926:50 7967:108 8262:33 8416:43 8420:101 8645:97 8703:30 9126:48 9386:66 9529:177 10035:45 10154:134 10158:156 10161:121 10350:48 10381:43 10443:85 10532:174 10733:15 10779:39 10814:386 10932:45 11106:103 11136:111 11447:74 11450:111 11545:38 11871:144 11877:98 11923:93 12054:131 12136:36 12344:104 12363:45 12607:58 12610:53 12692:48 12712:558 12781:73 12916:7 12968:6 13639:66 13685:30 13881:15 13891:41 13929:14 14015:61 14404:26 14521:71 14972:59 14987:42 15492:75 15523:101 16034:109 16211:47 16220:49 16225:50 16266:60 16312:558 16433:67 16464:33 16524:39 16651:48 16810:103 17049:43 17098:110 17120:166 17173:45 17211:43 17267:31 17318:38 17369:42 17555:142 17711:69 18353:131 18364:35 18912:48 18944:57 19104:77 19278:21 19327:160 19334:65 19386:15 19677:69 19926:98 +1040:140 1268:150 1796:159 1851:108 2134:154 2446:77 2894:103 2934:275 3174:288 3197:161 3352:331 3377:136 3425:46 3506:49 3578:17 3643:185 3829:79 3916:50 4376:144 4514:41 4573:83 5507:281 5594:10 6234:66 6642:87 6866:392 7494:313 7616:51 7709:65 7873:79 8078:155 8215:124 8786:616 9048:91 9782:203 10161:55 10532:166 10848:105 11078:1 11518:128 11871:103 11964:178 12238:84 12244:139 12319:106 12342:157 12916:18 12968:70 13202:111 13414:62 13868:50 14099:28 14820:185 14880:92 14883:28 14893:79 15177:45 15395:113 15641:131 16028:116 16029:64 16464:142 16524:166 16810:12 16864:101 17805:209 17807:32 17896:90 18115:98 18723:111 18800:14 18943:107 19174:163 19255:120 19787:88 +70:82 247:49 301:38 412:46 879:34 1032:87 1040:28 1052:26 1057:51 1316:125 1367:65 1448:16 1455:32 1549:54 1721:105 1951:102 2536:55 2591:78 2990:53 3093:81 3578:28 3746:124 3915:72 3937:120 3988:61 4308:60 5305:49 5454:102 5494:147 5664:87 6437:329 6577:105 6865:50 6866:329 7363:41 7459:154 7470:95 7709:90 7898:90 8218:130 8821:102 8925:17 9226:327 9328:145 10180:105 10206:58 10532:97 10538:143 10848:168 11022:49 11106:67 11125:70 11224:51 11328:52 11871:129 11984:164 12178:158 12238:33 12388:103 12465:128 12610:89 12674:162 12678:110 12798:84 12916:18 12935:42 13109:129 13402:107 13685:51 13881:190 13929:24 14161:40 14252:152 14851:40 15294:45 15344:693 15448:48 16278:114 16584:42 16721:109 16831:17 17017:64 17267:52 17982:30 18343:75 18364:30 18580:96 18624:162 18886:95 18973:58 19255:84 19386:50 19890:234 +53:44 174:103 203:27 268:81 413:33 496:493 568:47 640:43 685:109 692:78 732:51 749:35 1057:58 1135:48 1173:314 1263:45 1282:36 1395:72 1452:50 1525:43 1549:24 1798:39 1951:22 1987:22 2015:20 2062:21 2194:25 2290:50 2324:37 2389:33 2463:51 2538:133 2584:65 2717:35 2751:147 3174:25 3237:28 3296:163 3422:32 3491:91 3515:19 3578:52 3647:35 3696:38 3788:84 3860:71 4023:37 4045:43 4153:28 4208:153 4232:28 4355:49 4460:33 4489:128 4637:21 4909:85 5021:38 
5066:51 5128:36 5131:27 5166:33 5298:162 5304:89 5305:22 5361:21 5537:130 5556:177 5612:28 5803:31 5867:26 5874:26 5877:21 5934:30 5959:18 6084:58 6153:20 6181:35 6234:286 6253:29 6374:73 6602:26 6642:143 6837:26 6866:143 6886:114 6896:83 7027:31 7097:36 7127:45 7362:43 7363:73 7384:43 7422:41 7494:49 7560:28 7632:38 7658:30 7673:36 7709:121 7729:45 7739:19 7746:86 7875:59 7893:27 7938:31 8025:38 8033:101 8078:95 8150:31 8151:60 8152:57 8293:44 8298:27 8416:64 8548:16 8636:92 8737:92 8786:27 9043:52 9166:48 9199:85 9233:56 9424:24 9614:70 9661:31 9707:73 9757:28 9814:33 9891:38 9906:74 9941:68 9987:17 10154:96 10218:82 10229:62 10241:39 10275:50 10277:80 10377:41 10381:97 10446:55 10457:24 10470:32 10520:35 10535:27 10620:45 10650:29 10735:17 10751:282 10794:35 10799:29 10856:32 10876:38 10925:25 11245:99 11292:57 11322:24 11346:57 11449:44 11571:111 11684:38 11685:41 11711:39 11712:104 11752:85 11952:44 11961:46 11964:31 11974:41 11981:50 12063:49 12139:70 12238:164 12298:67 12313:80 12319:19 12320:65 12378:68 12394:49 12406:119 12448:14 12468:56 12547:30 12601:26 12804:30 12834:45 12906:60 12916:7 12932:164 12993:67 13005:42 13029:46 13067:43 13114:103 13192:39 13450:52 13529:104 13634:72 13863:28 13881:158 13896:36 14032:56 14067:19 14197:43 14221:36 14404:20 14530:44 14665:14 14732:50 14820:33 14851:35 14973:21 15287:56 15301:34 15303:23 15492:38 15571:52 15642:41 15868:78 16094:33 16197:171 16208:95 16289:58 16294:238 16367:64 16406:31 16523:81 16581:27 16621:71 16678:35 16795:74 16798:62 16872:46 16997:22 17173:51 17238:56 17267:23 17270:104 17272:76 17318:28 17471:76 17493:35 17753:34 17859:90 17902:52 17952:33 17970:41 18050:39 18100:63 18314:55 18406:63 18413:95 18436:97 18499:34 18622:60 18713:59 18871:65 18907:122 18913:78 19017:45 19109:86 19117:28 19172:27 19174:14 19240:41 19282:43 19404:11 19429:32 19444:119 19483:45 19569:35 19629:40 19674:120 19739:48 19921:32 19973:24 +43:46 80:26 113:108 133:41 166:38 235:81 286:34 431:39 585:28 657:54 732:67 776:29 981:52 1033:45 1039:82 1057:72 1070:69 1150:51 1166:84 1454:38 1481:30 1691:171 1776:13 1951:22 1987:130 2275:72 2351:40 2407:114 2425:89 2612:46 2630:53 2670:61 2808:48 2897:27 2952:10 3066:169 3141:64 3148:83 3280:34 3578:30 3850:59 3965:720 3993:48 4273:100 4387:38 4425:72 4526:23 4554:44 4573:85 4576:160 4687:28 4723:20 4792:47 4857:30 4970:88 5081:22 5128:36 5227:35 5305:21 5596:28 5627:30 5631:53 5670:37 5749:134 5877:42 5965:44 6060:140 6065:54 6109:103 6181:69 6234:99 6253:29 6274:23 6629:40 6837:51 6866:113 6912:34 6917:24 7210:31 7248:31 7363:36 7483:28 7658:30 7748:118 7797:74 7836:5 7931:39 7938:30 7967:66 8082:45 8115:46 8214:93 8268:48 8475:99 8515:84 8605:40 8631:30 8703:67 8789:106 9006:62 9157:52 9166:24 9333:75 9424:23 9998:28 10089:61 10350:35 10377:40 10386:49 10438:66 10532:99 10732:44 10735:17 10932:34 11069:96 11074:33 11159:114 11259:55 11342:19 11422:35 11463:57 11698:52 11877:72 11969:36 12161:27 12387:46 12602:56 12607:171 12610:26 12641:38 12781:147 12798:163 12916:4 12935:18 13006:19 13028:118 13151:33 13337:29 13520:130 13588:39 13685:22 13738:59 13881:135 13929:10 14074:39 14167:23 14338:39 14375:216 14809:22 14948:46 15029:40 15084:54 15178:36 15298:61 15433:26 15586:26 15780:14 15798:227 15869:27 15925:184 15983:46 16022:47 16225:37 16298:156 16353:32 16433:65 16464:74 16515:76 16524:58 16682:93 16766:37 17017:84 17374:80 17457:27 17531:37 17691:54 17939:37 18065:56 18115:96 18146:48 18236:123 18364:19 18861:43 18912:36 18916:135 18986:32 19001:40 19085:37 19095:48 19117:28 19199:42 19327:102 
19386:22 19499:56 19707:71 19809:22 19830:155 19873:28 19947:43 +626:138 1040:137 1465:108 1810:226 1914:102 2015:111 2185:78 2446:75 2491:48 2751:81 2785:22 3051:83 3551:67 4346:79 5045:225 5379:69 5783:68 6102:112 6234:152 6349:10 6866:264 7363:102 7494:125 7680:120 7709:248 7896:82 7968:83 8027:69 8151:126 8152:106 9948:77 10073:44 10244:289 10443:125 10457:44 10532:197 10735:96 10848:81 11006:126 11871:35 11994:157 12062:101 12238:216 12290:269 12616:73 12916:79 12939:60 12968:63 13881:85 13929:58 14239:95 14446:242 14665:80 14851:98 15948:146 16162:197 16810:225 16974:105 17079:302 17210:127 17330:102 17487:172 17745:82 18408:147 18770:51 18964:84 19187:128 19248:44 19282:187 19685:219 19787:85 19908:555 19926:107 +23:31 251:87 977:52 1040:70 1273:69 1316:122 1353:57 2062:118 2090:144 2290:142 2463:29 2751:95 2893:247 2990:164 3204:75 3308:210 3456:90 3578:17 4473:153 5417:117 5612:160 6084:493 6347:60 6368:155 6804:73 6866:400 7108:68 7223:103 7494:152 7709:291 7755:110 7951:87 8078:156 8215:124 8333:138 8342:148 8804:60 9859:133 10110:90 10134:49 10161:105 10848:171 11052:83 11102:105 11245:68 11278:86 11400:55 11871:46 12916:22 12968:49 13163:240 13881:46 13884:47 14639:267 14652:33 14733:292 14809:125 14819:11 14878:247 14973:365 15115:142 15605:26 16325:285 16576:112 17718:59 17899:122 18286:12 18779:70 19109:43 19118:62 19335:99 19463:81 19676:81 +448:88 520:78 829:57 1145:714 2282:55 2690:78 2709:128 3266:105 3643:104 4011:60 4385:106 5363:158 5379:101 5650:105 5946:23 5994:80 6270:47 6372:9 6374:55 6866:372 6933:144 7129:112 7248:94 7494:270 7709:104 7844:133 8099:63 8291:34 8548:95 8799:75 9109:56 9509:284 9740:115 9987:52 10161:31 10187:55 10276:47 10515:87 10532:20 10687:58 10799:127 10848:167 11666:75 11840:61 12206:56 12355:84 12448:43 12542:45 12704:131 12916:17 13745:52 13881:70 14087:522 14537:107 15115:75 15204:145 15378:20 15785:76 16034:128 16593:138 16638:204 16690:75 16763:176 17279:65 18115:71 19455:81 19680:106 19787:70 +40:281 101:78 188:64 441:30 464:126 474:41 616:59 962:138 1052:197 1268:45 1585:85 2048:108 2882:133 2934:95 3578:26 3586:138 4614:141 5487:95 5651:8 6199:53 6234:70 6859:122 6866:294 7494:165 7709:78 7729:97 7819:92 8078:58 8099:89 8324:142 8349:137 8945:109 9249:674 9331:128 9680:78 9923:217 10241:92 10532:212 10576:67 10665:159 10848:161 11187:120 11409:222 11698:205 11871:91 12704:97 12724:75 13305:126 13384:103 13929:44 14015:88 14284:132 14543:20 14844:108 14976:76 15294:84 15392:44 15849:88 16034:74 16401:5 16555:85 16705:140 16810:82 17102:143 18115:127 18766:24 18821:138 19207:108 19309:114 19359:72 19453:109 19756:312 19799:140 19926:91 19999:40 +360:61 813:80 1387:90 1715:306 1808:62 1987:307 2518:31 2737:73 2751:24 3456:131 3578:43 3755:138 3818:70 3976:38 4209:117 4425:96 4701:86 4868:105 4962:161 4984:49 5557:70 6060:88 6167:93 6234:70 6866:503 6909:123 8288:699 9501:79 9907:17 10048:77 10156:35 10443:83 10779:405 10848:60 11018:393 11871:160 11946:32 12206:102 12543:179 12884:88 13230:11 13378:71 13674:105 13731:55 13808:53 18372:83 19083:130 19926:79 +59:135 1040:27 1057:25 1393:171 1497:190 2894:81 3038:14 3176:90 3244:36 3266:87 3352:130 3578:41 3617:428 4237:100 4908:99 5001:119 5305:97 5830:65 5946:74 6067:122 6371:43 6532:148 6792:71 6866:578 6877:87 7306:87 7363:81 7494:112 7612:108 7709:445 7729:101 8078:607 8789:81 8799:108 9089:78 9207:61 9500:22 9984:140 10035:61 10394:108 10532:39 11069:120 11224:200 11358:184 11487:139 11518:101 11577:222 11778:70 11871:65 12237:184 12238:45 12599:107 12619:104 12916:25 13628:73 
13829:102 13929:46 14195:125 14244:164 14844:225 14880:117 15204:30 15280:98 15294:88 15492:84 15844:68 16029:74 16159:152 16584:164 17220:100 17270:121 17528:59 17639:103 18364:29 18768:117 18779:93 18916:138 19255:226 19327:76 19386:49 19787:39 19801:39 19831:33 19889:117 +1052:50 1249:124 1834:125 2185:88 2219:77 2700:907 2990:181 3038:68 3075:147 3244:145 3488:82 3578:13 4831:41 5379:116 5890:124 6660:94 6866:127 7494:323 7686:80 7739:79 8548:67 8666:131 8789:19 8821:82 9379:256 9396:143 9698:132 9704:5 9887:30 10003:128 10035:87 10276:66 10394:143 10532:125 10848:183 11133:56 11181:15 11245:59 11557:129 11718:125 11871:43 12238:66 12282:103 12529:105 12599:136 12607:93 12704:100 12916:3 12968:99 12972:155 13881:75 14313:109 14556:79 14785:55 15387:63 15723:32 15777:92 16025:173 16031:91 16211:152 16478:75 17185:103 18319:89 18372:170 19217:182 19278:67 19453:123 19786:133 +748:109 1057:4 1968:130 2110:73 2524:72 2851:235 2950:513 3361:37 3507:116 3578:25 4334:413 5016:423 6313:71 6820:513 6866:606 7230:47 7325:21 7494:244 7709:185 7896:77 7948:55 8657:54 9594:45 9788:57 9834:94 10154:89 10161:23 10379:120 10532:177 10589:79 10737:66 10813:151 11524:115 11571:64 11871:84 12007:113 12352:23 12582:116 12958:108 13524:145 13884:39 14658:111 14823:95 15448:42 15820:51 16049:108 16478:138 16810:80 16826:42 16838:53 17395:157 17457:223 18009:99 18115:153 18261:46 18599:66 19120:103 19209:99 19293:62 +526:23 563:101 829:15 981:72 1152:77 1347:143 1370:168 1667:60 2173:111 2312:116 2347:86 2785:107 2872:61 3214:52 3336:149 3479:134 3578:8 3586:89 3598:83 3614:175 4029:100 4061:107 4376:87 4502:70 4526:65 4587:85 4777:142 4820:110 4826:78 4962:70 5043:84 5093:133 5304:74 5441:117 5959:50 6080:67 6102:85 6529:66 6866:122 7063:51 7143:54 7183:150 7494:200 7510:67 7709:121 8561:131 8789:69 9579:139 9987:47 10184:149 10241:66 10470:87 10532:27 11695:78 12448:116 12599:19 12607:59 12666:299 12704:101 12842:105 12913:54 12916:6 12942:136 12968:156 13130:112 13829:168 13881:143 13964:122 14226:71 14234:97 14578:79 14742:68 15071:322 15376:64 15378:122 15462:70 15777:89 15806:304 16345:40 16364:114 16464:68 16571:108 16691:170 17356:118 17813:451 18050:53 18403:115 18409:144 18818:102 18833:298 19041:80 19247:164 19278:43 19334:133 19372:87 19778:195 +1057:109 1070:88 1110:46 1312:41 1322:43 1889:46 1987:83 2528:135 2609:82 2751:57 2894:68 3043:135 3227:10 3242:155 3307:44 3484:91 3976:120 4464:119 4707:77 5128:138 5162:72 5298:98 5310:119 5596:107 5683:146 5946:61 6234:34 6306:95 6866:323 6995:163 7321:137 7596:404 7657:87 7709:177 8211:77 8488:56 8759:123 8784:111 8804:29 8925:19 9280:141 9596:207 9673:24 10182:475 10244:196 10364:126 10457:59 10638:42 10773:106 10799:89 10848:87 10943:40 11061:238 11164:61 11685:114 11778:78 11835:68 11869:82 11871:110 11987:131 12120:149 12213:131 12238:178 12469:107 12610:50 12732:86 12916:39 13529:89 13881:206 13896:271 14665:54 14809:83 14880:139 15340:210 15383:79 15523:74 15681:44 16034:136 16115:91 16188:160 16302:55 17167:194 17363:182 17365:55 17434:307 17442:82 18397:121 19172:100 19386:42 19685:198 19966:111 +133:54 268:42 285:44 298:61 413:43 533:99 561:44 1040:16 1575:51 1633:37 1704:78 1802:49 1990:29 2026:48 2062:80 2194:33 2339:27 2402:339 2447:51 2602:93 2726:47 2759:63 2771:110 2851:75 2888:207 2934:61 2990:78 3187:29 3314:44 3483:39 3484:31 3578:31 3614:78 3785:60 3908:70 4126:52 4473:175 4614:43 4637:248 4857:78 5449:211 5538:173 5551:59 5874:101 5946:172 6010:103 6060:99 6274:30 6285:70 6496:171 6689:118 6711:47 6866:274 6912:45 7055:64 7600:70 
7709:129 7760:53 7850:87 7872:98 7947:38 7957:73 8099:27 8333:31 8362:41 8804:177 9021:48 9056:295 9172:63 9530:77 9605:124 9671:98 9987:22 10095:88 10231:145 10238:430 10309:59 10457:41 10510:12 10735:134 10825:59 11031:42 11197:66 11302:39 11322:31 11685:115 11871:152 11898:41 12105:49 12607:28 12616:70 12804:115 12916:63 13384:58 13491:106 13520:66 14161:332 14677:53 14749:137 14947:118 14973:276 14987:41 15112:66 15115:32 15264:113 15330:40 15472:135 15630:75 15669:43 15774:53 15793:29 15806:48 15901:158 16406:40 16770:33 16881:32 17046:57 17079:35 17083:31 17173:110 17536:61 17621:88 17949:37 18688:33 18715:21 18792:168 19109:103 19120:14 19210:212 19255:157 19787:148 19809:30 19926:103 19988:111 +1057:75 1115:124 1912:76 2185:65 2402:407 2934:140 2990:82 3405:78 3433:69 3578:25 4279:179 4494:103 4892:141 5204:110 5383:156 5526:54 5625:140 5785:119 6458:80 6861:90 6866:468 7477:136 7494:143 7709:79 7960:57 7998:65 8154:429 8431:65 8804:68 9526:130 9636:54 10035:156 10109:80 10110:99 10983:430 11164:139 11246:172 11322:294 11328:94 11518:188 11871:98 12130:44 12379:117 12406:10 12448:59 12674:37 12916:10 13006:79 13143:108 14161:37 14556:77 14682:70 14697:87 14754:185 14859:73 15259:73 15492:156 15777:50 15879:174 16325:104 16382:122 17083:98 17104:78 17189:140 17279:178 17896:89 17923:156 18191:141 19174:60 19255:65 19455:111 19921:67 +257:34 626:358 1040:178 1052:67 1797:76 1889:141 1914:106 2185:154 2751:63 2901:143 3100:117 3471:30 3876:23 3890:79 3916:92 4723:119 5417:212 5699:91 5946:91 6150:26 6234:122 6280:67 6866:480 7113:102 7452:141 7494:66 8152:110 8154:199 9006:138 9187:33 10244:449 10532:235 10744:139 10848:189 11164:60 11871:212 12170:99 12238:258 12355:82 12610:76 12704:63 12916:127 12992:329 13130:103 13285:187 13766:139 13881:109 14116:87 14239:197 14461:56 14563:92 14665:83 14724:90 15329:41 15523:163 15799:113 15859:226 16034:104 16433:84 16700:126 16810:65 16974:218 17487:179 17643:355 18122:139 18364:76 19193:115 19487:89 19685:228 19797:61 +520:88 879:8 1057:125 1549:27 1578:59 1683:4 1810:144 2075:126 2185:70 2186:42 2691:45 2934:91 2948:85 2990:94 3043:66 3539:43 3578:3 3584:92 3654:77 3823:146 4308:91 4412:34 4427:24 4541:455 4614:38 4896:455 5014:62 5081:25 5128:42 5379:165 5775:41 5877:25 6374:21 6392:82 6866:356 6917:28 6927:83 7453:69 7494:215 7723:45 7729:26 8501:118 8710:134 8804:99 8975:18 9446:58 9860:30 10003:110 10187:161 10241:97 10457:9 10532:142 10613:59 10703:519 11106:40 11224:51 11447:116 11487:72 11898:83 12136:63 12319:22 12448:16 12610:45 12704:1 12768:24 12916:9 13288:27 13338:70 14161:71 14427:427 14945:185 15047:85 16447:87 16810:62 16860:33 16936:33 17070:11 17104:65 17171:84 17328:41 17343:193 17711:30 18339:136 18372:109 18580:49 18752:105 18779:26 19386:51 19484:61 19758:48 19874:88 19894:19 +254:424 439:67 812:93 895:117 953:6 985:80 989:65 1015:97 1052:103 2612:415 2934:95 2990:37 3043:62 3071:76 3245:67 3513:30 4035:518 4225:72 4402:49 4489:103 4831:24 5683:91 6168:79 6229:117 6234:100 6347:168 6418:24 6792:112 6864:68 6866:365 6927:214 6987:89 7363:163 7494:104 7709:198 8475:64 8804:138 8856:55 8866:504 9048:26 9148:61 9282:73 9671:82 10443:155 11070:75 11224:200 11253:80 11695:22 11764:127 11871:347 12155:44 12206:90 12798:155 12844:169 12917:44 13384:115 13829:77 13881:91 14118:82 16902:79 17373:67 18545:68 18916:82 18948:130 19478:143 19546:89 19787:93 +101:51 106:718 247:60 520:71 1046:84 1052:64 1472:103 1584:99 1691:87 2240:88 2689:39 2934:168 2990:103 3578:17 3661:121 3988:75 4387:108 4450:105 4576:64 5041:60 5619:114 
5946:94 6234:9 6374:50 6866:569 7486:81 7514:90 7709:103 7729:63 7896:66 8099:58 8544:112 8548:44 8644:148 8789:99 8804:115 10023:14 10328:79 10385:65 10532:63 10953:127 11040:19 11106:40 11400:30 11668:130 11699:100 11726:122 11775:46 11871:63 12238:43 12406:35 12610:109 12614:75 12798:96 12916:11 13881:133 13964:62 14660:118 14880:114 15153:89 15571:575 15780:39 15785:54 15798:128 15914:171 16997:60 17017:78 17270:93 17721:103 17741:117 17863:43 17939:77 18115:83 18364:18 19095:68 19255:102 19271:148 19278:44 19787:118 19996:139 +287:74 297:42 493:78 648:36 1054:104 1057:153 1448:64 1609:24 1880:34 1889:49 2630:212 2781:59 2990:216 3307:111 4266:83 4573:116 4612:65 5699:43 5866:450 6476:141 6508:72 6866:219 6869:65 7360:71 7549:85 7709:177 8217:154 8310:157 8413:98 8667:119 8813:96 8832:408 8862:102 8941:102 8945:201 9006:125 9697:124 9730:98 10003:79 10028:94 10043:137 10419:474 10532:96 10535:211 10626:95 10848:62 10984:408 11069:23 11685:36 11871:68 12222:90 12238:91 12661:97 13143:54 14236:97 15346:51 15523:94 15576:102 15777:55 16029:40 16223:46 16478:76 17247:87 17983:118 18481:94 18493:186 18559:270 19707:67 19805:80 +118:156 626:178 1052:33 1338:159 1851:86 2528:55 2652:88 2760:86 2934:58 2990:115 3075:102 3133:73 3266:114 3320:114 3361:104 3366:38 3578:17 3712:71 3881:39 4026:91 4826:82 5010:121 5103:113 5132:89 5231:84 5248:76 6234:124 6725:76 6866:156 7004:142 7325:93 7334:121 7392:99 7443:114 7494:140 7709:244 7842:239 7857:799 8627:61 8903:64 9006:105 9276:189 9382:94 10161:172 10187:68 10443:87 10532:259 10848:61 10849:167 11253:69 11566:59 11698:44 11745:91 11871:132 11964:90 12139:99 12319:108 12448:41 12529:105 12916:59 13711:48 14161:152 14606:92 14835:92 14848:139 14940:224 14958:37 15331:149 15374:84 15780:160 15793:131 16152:139 16593:97 16675:73 16798:137 17015:96 17250:67 17267:66 17753:98 18115:125 18364:94 18372:107 18901:70 19455:77 19951:107 +235:87 520:111 822:43 1057:73 1513:92 1743:196 1968:219 1981:82 2062:71 2185:263 2244:76 2538:60 2894:62 2934:33 2990:117 3244:77 3578:10 4113:34 4406:120 4512:144 4695:111 4699:78 4780:116 5253:99 5257:54 5523:106 5551:158 5712:12 5874:90 5956:126 6234:201 6866:305 7194:143 7443:137 7513:11 7709:125 7739:128 7820:114 8099:72 8215:150 8333:334 8374:109 8457:101 8804:140 8859:148 8943:78 9023:143 9117:89 9683:122 9726:127 9960:40 9989:85 10093:118 10457:54 10532:103 10618:49 10725:147 10848:57 10966:69 10971:156 11400:29 11871:75 11880:108 12487:107 12599:124 12674:137 12692:372 12704:121 12916:52 12968:56 13165:535 13210:171 13224:126 13234:112 13829:79 14161:153 14749:183 15017:93 15678:121 15772:87 15812:104 16006:155 16382:5 16810:40 16968:57 16997:74 17173:59 17526:117 17620:95 17905:78 18050:67 18115:61 18364:23 18493:81 18737:128 18916:111 19095:85 19109:51 19309:13 19415:146 19750:22 19982:148 +1040:32 1889:72 2121:34 2325:31 2932:25 2934:93 2990:126 3266:112 3578:16 3977:135 4062:38 4707:106 5128:95 5446:131 5561:96 6866:179 7129:95 7361:62 7494:93 7709:1 8488:52 8804:110 9253:91 9596:95 9683:61 9723:145 9960:114 9987:90 10430:184 10457:82 10532:127 10848:48 11106:108 11245:58 11400:56 11545:148 11685:82 11871:95 12087:125 12157:23 12238:79 12319:98 12599:13 12674:132 12798:114 12916:37 12968:102 13305:64 13485:114 13929:54 14194:75 14729:47 15390:119 15429:290 15886:42 15960:105 16006:118 16288:737 16745:48 16905:126 16996:94 17285:321 17952:172 18364:103 18383:108 18599:62 18623:86 18916:122 19100:63 19132:143 19172:277 19187:120 19517:132 19921:85 +125:91 626:311 755:39 1040:386 1643:110 2560:67 2899:36 3049:57 
[Several thousand diff-added lines of a sparse SVM dataset file elided here: each added line is one feature vector written as LIBSVM-style `index:value` pairs (feature indices up to roughly 20000, integer values). The raw data is omitted for readability; only the format is noted.]
16980:90 17067:116 17853:102 18397:65 18410:73 18841:63 18868:48 19247:130 19525:141 +268:159 492:146 1040:30 1044:117 1052:28 1209:109 1279:163 1298:86 1363:13 1914:45 1951:110 1952:214 1990:55 2062:51 2185:114 2297:52 2339:51 2404:131 2564:147 2661:119 2690:63 3095:107 3150:72 3361:109 3433:41 3515:91 3578:30 3590:117 3643:109 3976:75 4342:55 4470:90 4554:164 4573:105 4723:51 4831:51 5128:45 5507:61 5612:69 5796:131 6236:87 6336:285 6866:596 7164:124 7494:4 7669:87 7709:138 7729:56 7805:61 7850:55 8078:201 8262:120 8306:80 8433:71 8703:56 8759:48 8906:107 9166:59 9424:58 9526:77 9671:111 9771:78 9989:61 10241:151 10276:114 10292:100 10446:68 10457:19 10532:151 10622:144 10848:69 10925:62 11002:85 11322:116 11328:56 11513:133 11518:55 11588:65 11781:58 12085:49 12448:104 12582:90 12599:84 12916:27 12935:46 13050:89 13192:95 13288:57 13337:71 13440:64 14138:94 14156:10 14161:87 14197:104 15071:289 15111:163 15168:61 15552:315 16037:130 16232:262 16349:135 16867:23 16913:61 16959:62 17015:16 17061:146 17104:93 17173:42 17272:62 17318:210 17334:129 17508:117 17532:95 17961:118 17990:83 18007:78 18115:148 18253:138 18364:32 18401:111 19174:70 19232:140 19278:39 19609:62 19636:98 19873:69 19935:72 +12:62 1040:89 1450:36 2025:110 2243:79 3578:22 3977:32 4176:47 4514:71 4857:439 5507:359 5946:106 6866:398 7494:98 7682:89 7709:144 7790:86 7848:107 8078:99 8325:136 8786:197 8789:80 9782:520 9948:89 10241:70 10443:131 10532:97 10753:64 10795:171 10799:140 11007:5 11106:154 11326:71 11617:131 11884:118 12191:88 12205:101 12319:136 12342:403 12916:11 13638:41 13829:88 13863:202 13881:70 14107:90 14158:165 14510:68 14820:237 15674:66 15879:112 16019:56 16029:92 16810:138 17306:68 17782:105 18697:43 18779:67 19109:100 19174:209 19822:54 +101:47 257:132 869:109 1040:16 1052:30 1779:103 1968:41 2894:47 3264:35 3372:18 3433:43 3578:16 4209:50 4377:472 4416:148 4830:136 5305:56 5558:115 5760:105 5783:43 5946:45 6234:97 6245:150 6404:104 6702:149 6866:317 7055:43 7217:142 7240:20 7494:89 7709:169 8349:83 8652:129 9109:100 9325:47 10161:123 10532:78 10773:73 10797:130 10848:11 11069:117 11350:83 11401:170 11406:70 11463:107 11695:107 11822:92 11871:271 12448:37 12463:41 12534:124 12564:85 12777:34 12903:260 12916:16 12982:109 13715:83 13881:78 14375:227 14392:127 15411:52 15424:112 15523:104 15785:145 16006:58 16044:105 16218:367 16452:64 16584:96 16876:138 16880:137 17017:73 17526:437 17799:236 18109:81 18241:12 18364:17 18383:123 18493:61 18916:142 19386:29 19455:69 +173:99 569:32 754:123 1273:141 1715:74 2076:94 2389:113 2528:29 3051:94 3242:139 3578:10 4510:101 4576:79 4614:56 4778:127 5081:75 5163:125 5316:71 5561:109 6306:68 6315:96 6599:228 6866:70 6927:81 7086:94 7145:177 7306:99 7388:45 7709:346 7729:77 7773:107 8099:71 8217:1 8408:79 8433:121 8505:41 8703:77 8821:30 8943:77 9079:491 9596:124 9987:58 10244:88 10457:27 10786:85 10868:45 10937:139 11695:90 11871:122 12090:72 12406:95 12610:89 12669:165 12680:200 12713:1 12732:77 12916:21 13146:158 13346:136 13679:125 13907:92 14461:35 14485:177 14665:49 14918:88 14982:140 15355:59 15390:51 15640:326 16241:59 16810:157 16881:83 17173:58 18266:89 18372:123 18814:79 19024:56 19255:105 19311:63 19685:44 19776:612 +112:50 205:100 732:27 1057:125 1522:109 1704:102 1886:94 1889:126 1968:50 2028:128 2528:172 3578:24 3617:255 3816:66 3976:85 4055:78 4173:131 4460:104 4646:95 4820:91 4830:84 4869:81 4957:51 5096:1 5270:85 5424:75 5660:67 5724:128 6234:189 6866:482 7080:25 7089:91 7143:44 7363:29 7494:118 7709:437 7729:36 7856:107 7886:61 7893:42 7947:46 8078:87 
8099:33 8463:56 8500:103 8727:88 8789:104 8801:71 8817:124 8970:149 9501:172 9564:130 9763:88 9923:81 9984:106 10051:727 10161:124 10532:176 10677:61 10797:160 10848:80 11106:114 11400:71 11668:74 11685:18 11971:194 12448:23 12555:64 12677:30 12692:114 12968:77 12996:53 13151:53 13685:36 14040:60 14429:151 15201:43 15448:78 15791:83 16034:70 16223:136 16478:25 17060:48 17083:37 17272:40 17649:191 17782:101 18007:66 18119:51 18520:91 18896:98 19094:56 19109:109 19144:123 19221:300 19386:17 19834:45 19992:78 +80:118 247:98 407:228 1040:55 1052:52 1057:154 2281:20 2311:118 2538:79 2725:156 2990:288 3051:37 3515:84 3578:27 4193:250 4492:52 5162:79 6234:30 6865:200 6866:205 7390:49 7445:470 7507:96 7709:185 7896:136 8214:141 8320:133 8548:71 8710:102 9109:86 9180:86 9195:276 9473:155 10167:117 10483:121 11183:221 11871:203 12098:191 12547:137 12552:443 12610:295 12916:25 13243:162 13404:65 13529:93 13829:36 13931:135 14161:40 14260:45 14556:83 14766:79 14880:12 15336:63 15378:57 15448:22 15771:92 15780:63 16529:48 16810:156 18364:30 18821:146 19079:138 19120:14 19327:77 19386:99 19777:83 19921:73 +96:128 294:136 596:40 825:69 981:42 1158:339 1166:46 1279:54 1303:99 1421:89 1569:79 1704:110 1715:35 1987:35 2467:74 2522:145 2751:70 2934:29 2990:46 3308:30 3345:63 3578:20 3603:89 3645:137 3661:120 3915:51 4011:67 4308:85 4494:82 4526:76 4687:45 4711:52 4722:51 4820:55 5163:32 5946:82 6372:97 6599:108 6866:229 6927:39 7086:139 7293:97 7406:66 7527:123 7560:45 7596:43 7709:64 8079:53 8297:124 8361:55 8562:757 8663:14 8720:81 8759:117 9298:89 9328:130 9440:61 9511:142 9987:28 10010:51 10229:50 10260:66 10299:70 10350:58 10443:166 10470:102 10532:77 10535:86 10568:108 10848:144 11106:46 11122:117 11572:121 11695:114 11871:88 12209:66 12319:30 12755:85 13355:58 13829:253 13881:40 14067:31 14161:14 14638:20 14851:28 14880:13 14969:68 15058:60 15523:114 15640:154 16094:53 16187:62 17062:79 17272:41 17426:79 17457:44 17963:108 18002:33 18281:91 18413:102 18559:165 18879:209 18894:44 19117:45 19255:41 19386:18 19707:154 19774:34 19926:112 +80:89 956:208 989:84 1040:42 1052:39 1820:71 2015:136 2186:123 2481:133 2894:61 2983:721 2990:190 3578:10 4200:87 4494:8 4510:132 4554:75 4573:109 4831:139 5305:74 5783:85 5877:73 6097:82 6168:99 6347:106 6815:81 6866:377 6995:130 7250:105 7546:208 7709:145 7729:77 7848:95 7853:27 8078:92 8154:233 8347:92 8416:109 8548:53 8737:234 8912:32 9524:8 9948:83 10241:90 10443:101 10532:135 10783:116 10799:72 10848:81 11018:95 11358:140 11685:316 11695:68 12319:127 12448:48 12582:93 12610:222 12916:37 12935:63 13881:182 13929:35 14161:30 14222:81 14245:129 14656:65 14711:60 15294:67 15774:117 16382:76 16581:93 17005:118 17374:182 17961:8 18364:44 18372:113 18916:16 19109:108 19120:116 19210:61 19255:92 19327:116 19386:37 19455:91 19707:113 +220:108 235:115 268:24 466:54 657:41 659:43 711:59 732:51 891:40 991:69 1040:18 1181:55 1244:30 1313:42 1427:47 1433:114 1522:103 1782:101 1951:33 1990:67 2062:31 2592:76 2679:82 2720:123 2726:130 2894:27 2897:41 3040:106 3174:76 3187:33 3237:43 3433:49 3578:45 3876:57 4191:276 4237:106 4518:128 4521:47 4603:45 4614:25 4707:30 4820:64 5178:41 5487:151 5551:68 5775:106 5877:32 6007:591 6234:111 6511:61 6549:40 6602:38 6866:730 7052:59 7099:71 7144:56 7338:64 7384:65 7390:141 7392:23 7475:113 7494:354 7567:76 7632:77 7679:75 7709:60 7739:28 7841:242 7848:81 7947:43 8151:126 8320:118 8404:55 8454:88 8465:97 8475:63 8477:187 8514:29 8536:80 8548:70 8555:67 8605:61 8737:103 9096:90 9244:67 9460:64 9565:56 9766:124 9772:75 9987:103 9989:74 10037:122 
10161:16 10238:45 10241:147 10280:75 10443:120 10457:47 10532:149 10663:53 10991:94 11085:56 11092:98 11159:67 11168:152 11213:136 11545:42 11617:83 11631:41 11801:105 11808:117 11824:81 12238:21 12448:21 12610:39 12622:105 12804:44 12916:9 13013:82 13042:90 13043:203 13050:54 13288:69 13433:49 13561:58 13884:50 14144:55 14158:34 14404:29 14501:148 14653:52 14880:89 14992:163 15123:83 15398:83 15427:41 15586:39 15780:63 15793:34 16478:45 16552:45 16676:79 16881:37 16912:39 17173:178 17264:61 17297:51 17299:115 17420:121 17520:56 17571:44 17784:41 17949:85 17954:77 17970:30 18037:44 18397:157 18599:92 18711:51 18734:46 18779:69 18821:48 18911:50 18964:115 19041:44 19120:48 19174:150 19210:108 19354:68 19921:73 19973:72 +6:116 520:72 626:275 1040:273 1057:107 1506:150 1763:95 1809:124 1968:176 2112:143 3084:173 3456:132 4087:85 4580:99 4820:54 5387:602 5586:77 6060:137 6234:54 6668:101 6866:146 7709:208 8320:125 8671:96 8789:72 8799:3 8804:116 9148:2 9643:157 10161:108 10239:94 10244:431 10443:207 10532:93 10860:36 11501:36 11663:81 11685:103 11805:150 11994:314 12798:37 12916:87 12968:68 13485:36 13929:115 14239:189 16019:112 16838:87 17487:171 18115:98 18323:6 19254:86 19685:291 19707:131 +219:98 755:118 948:489 989:120 1273:98 1472:72 3360:90 3361:97 3670:65 3864:137 3867:140 4587:149 4699:57 4809:56 4925:291 5417:65 5589:439 5783:137 6027:86 6101:179 6234:1 6866:445 7194:13 7494:181 7709:235 8172:411 8556:155 8614:56 8804:86 9089:130 10241:125 10276:141 10341:90 11153:113 12319:171 12506:49 12582:86 12704:109 12916:14 14209:51 14709:53 14793:44 15156:79 15747:15 16193:114 16478:113 16635:28 16810:186 17487:141 17648:66 18115:82 18364:60 18397:10 18797:96 19109:97 19327:156 19330:464 +410:101 626:200 1052:93 1252:73 1455:47 1889:114 2528:36 2751:143 2934:66 3377:136 4557:99 4799:80 5540:65 5783:206 5890:233 6060:185 6234:78 6285:49 6378:131 6866:205 7149:10 7407:197 7494:121 7709:25 7715:77 7739:150 8055:128 8148:411 8556:108 8838:397 9148:63 9375:24 9530:66 9792:54 10035:53 10244:105 10276:125 10443:32 10516:117 10702:73 10799:87 10833:215 10955:144 11006:183 11271:40 11310:508 11802:86 11871:20 12555:107 12732:183 12798:222 12916:19 13130:42 13387:111 13686:93 13881:146 13965:58 14026:33 14239:137 14862:124 15144:128 15911:26 15931:82 16810:176 16914:46 17890:46 18115:43 19278:127 19313:9 19539:272 19607:101 +80:95 1052:42 1057:61 1316:36 1549:86 1584:44 2093:70 2446:70 3208:82 3578:22 3829:63 3907:18 3976:146 3988:97 4576:167 4921:78 5128:66 5683:36 5783:189 5864:52 5877:78 5946:57 6234:35 6315:204 6865:80 6866:372 6927:87 7229:104 7632:56 7709:140 7729:82 8081:105 8099:76 8253:45 8475:142 8548:57 8892:80 9180:69 10161:75 10457:28 10532:156 10779:209 10795:45 10814:523 11159:144 11668:84 11871:83 12136:99 12173:87 12610:47 12612:107 12916:6 13143:149 13639:178 14161:32 15174:34 15355:151 16312:672 16786:53 17017:102 17267:83 17899:137 18364:24 18752:15 19255:79 19313:92 19327:62 19386:40 19926:42 +101:60 572:45 830:168 1110:52 1205:43 1436:137 1513:83 1968:52 1981:79 2269:129 2737:150 2751:64 3578:30 3664:97 3908:178 4238:96 4376:12 4450:66 4614:164 4723:68 4986:133 5057:60 5128:60 5446:104 5630:70 5783:72 6073:82 6866:528 7494:51 8349:106 8608:97 8804:159 9581:111 9654:133 10043:92 10206:85 10443:113 10460:88 10530:91 10532:119 10735:171 10848:51 11325:30 11699:27 11871:151 12582:55 12598:44 12814:22 12916:3 12935:62 13881:66 13884:82 13929:103 14999:37 15490:664 15763:71 15795:186 15888:89 16034:108 16053:80 16223:104 16504:14 16542:167 16584:61 16810:165 16905:45 17007:99 17017:93 
17411:456 18372:54 18703:214 18728:135 18916:58 19210:120 19255:24 19263:134 19386:73 19468:83 19809:75 +554:107 1057:108 2092:108 2659:408 3505:189 3707:100 4328:257 4404:50 5367:339 5472:92 6027:111 6348:78 6792:46 6866:641 7208:47 7494:156 7577:153 7709:172 7909:39 8261:733 8666:304 8925:81 10187:76 10241:102 10532:165 11245:48 11559:7 12238:4 12402:43 12480:48 12610:130 12638:53 12916:16 13384:106 14156:57 15127:87 15411:56 15674:89 15878:113 16343:148 18115:132 18324:60 18364:65 18624:81 18800:167 19034:150 19076:130 19180:149 19255:64 19342:92 19758:58 19926:65 +96:72 118:33 488:81 561:303 569:64 596:45 891:144 1052:41 1203:216 1244:145 1427:56 1481:54 1543:103 1609:23 1889:30 1990:81 1993:81 2145:129 2339:75 2345:54 2720:94 2751:11 2910:103 2990:19 3112:94 3213:48 3433:30 3578:22 4402:72 4460:118 4526:42 4614:59 4722:342 5010:144 5052:116 5163:54 5178:49 5447:99 5936:127 5959:33 6034:93 6280:76 6372:84 6656:94 6681:122 6846:72 6866:95 6876:147 7055:29 7119:88 7182:58 7371:63 7494:205 7681:96 7709:274 7844:93 8143:255 8164:283 8339:110 8420:79 8504:127 8720:136 8759:71 9046:71 9321:131 9614:42 10218:49 10276:28 10457:42 10532:246 10848:163 11684:68 12320:46 12355:31 12632:181 12916:14 13384:27 13570:377 13881:50 13902:432 14161:32 14325:35 14543:116 14697:38 15355:126 15492:34 15523:60 15605:32 15903:88 16112:57 16225:67 16478:49 16576:37 17692:128 17947:73 18768:47 18938:87 18944:46 19174:52 19386:20 19429:58 19592:63 19787:95 19926:83 +151:181 626:132 822:99 1040:262 1110:69 1688:124 1691:45 1914:39 1968:135 2098:43 2213:62 2275:78 2898:113 2899:45 3150:63 3337:61 3804:79 4124:119 4707:173 4723:176 4869:111 5041:46 5181:104 5446:124 5802:138 6060:86 6362:134 6373:19 6449:52 6866:630 6984:59 7086:98 7448:96 7494:67 7682:126 7709:406 7924:139 8152:40 8217:104 8804:62 9033:144 9117:77 9157:57 9162:36 9527:128 9530:122 9596:233 9834:99 9860:56 10035:23 10161:66 10229:66 10244:441 10264:149 10381:68 10457:100 10489:84 10532:250 10799:145 10844:154 10848:131 11133:146 11245:84 11330:46 11388:65 11822:38 11871:70 11957:24 12170:219 12206:36 12610:168 12732:48 12916:136 13165:235 13428:54 13523:64 13631:93 13929:22 14138:82 14239:36 14615:79 14665:31 14774:117 14947:113 15723:99 15859:83 15933:54 15948:112 16006:96 16441:75 16518:120 16997:138 17003:57 17422:45 17487:132 17526:72 18364:279 18493:50 18720:79 19255:42 19473:137 19685:84 19908:106 +204:96 1057:80 1082:91 1968:181 2612:529 2798:22 2858:32 2947:87 3051:66 3289:59 4021:15 4297:83 4306:66 4744:101 5524:74 5760:146 5937:66 6227:104 6826:70 6866:609 7129:2 7494:74 7709:210 7850:254 8098:70 8475:127 8799:175 8918:51 9730:114 10443:242 10535:304 10735:196 11069:93 11164:88 11400:211 12094:589 12206:90 12238:49 12674:127 12704:78 12858:107 12917:95 12968:62 13360:44 13378:111 13384:76 13389:65 13529:110 13725:171 14067:216 15211:83 15763:98 15777:196 15933:132 17457:311 17639:63 19024:29 +626:173 1040:171 1914:128 1972:110 1995:109 2611:65 2684:104 2720:119 3180:63 3307:67 3505:86 3772:53 4723:144 4730:38 5220:10 5379:98 5446:72 5937:47 5946:47 6060:95 6234:171 6295:105 6666:95 7494:91 7709:378 7775:119 8759:99 8804:63 8813:57 8986:184 9006:91 9975:468 10161:92 10187:98 10241:111 10244:181 10385:143 10457:55 10532:26 11106:15 12170:119 12595:239 12599:65 12675:132 12916:49 13759:468 13829:56 14665:100 15326:202 15578:169 15980:436 15990:60 16172:152 16478:23 16568:93 16810:85 17487:108 17621:158 18110:148 18364:91 18372:83 18880:13 19109:111 19515:151 19685:274 19746:35 +9:84 446:44 1001:2 1513:137 2374:84 2496:115 2934:30 2990:56 
3036:55 3038:73 3122:81 3308:34 3578:6 4232:105 4470:73 4633:98 4637:40 4699:157 4925:179 5589:270 5740:136 6347:46 6374:34 6409:133 6449:46 6602:48 6866:71 6995:47 7055:123 7336:121 7494:77 7518:70 7729:42 7758:97 7841:60 7926:70 8433:75 8488:69 8720:71 8799:132 9006:69 9221:98 9446:94 9528:97 9763:80 9782:67 9989:46 10241:51 10318:89 10448:86 10490:57 10532:482 10848:77 11085:9 11106:69 11156:75 11588:148 12238:185 12448:53 12704:126 12751:115 12916:6 12968:173 13337:54 13881:63 14087:483 14195:52 14313:23 15135:75 15378:140 15586:49 16026:46 16638:440 17270:49 17279:80 17896:149 18065:71 18091:550 18943:148 19083:53 19473:40 +101:67 1462:412 1715:80 1906:69 1972:172 2460:246 2493:139 2760:63 3361:65 3490:166 3578:11 3798:176 4173:151 4485:86 4510:77 4614:60 4675:129 4721:75 4906:59 5379:49 5724:143 5760:142 6073:107 6153:145 6224:123 6502:603 6866:628 6927:88 7355:28 7494:128 7709:523 7896:118 8349:118 8475:7 9101:49 9734:79 9827:237 10161:35 10532:71 10848:225 11094:51 11187:149 11822:65 12238:105 12620:159 13012:43 13390:289 13453:73 13685:164 13846:127 13929:38 14915:239 15798:84 15844:43 16029:34 16034:134 16662:85 16810:73 16967:57 17205:105 18115:43 18343:52 18737:80 19483:79 19707:59 19790:40 19926:90 19935:107 +1251:172 1467:134 1810:111 2269:175 2792:161 2823:118 2922:27 2957:108 4494:64 4831:145 5417:109 5431:120 5487:34 6060:109 6234:72 6444:29 6662:222 6866:303 7255:622 7474:66 7494:149 7709:142 8437:36 8518:72 8759:31 8789:109 9614:224 9966:86 10161:78 10292:7 10443:41 10532:74 11304:99 11642:48 11871:93 12170:164 12406:57 12704:72 13224:26 14239:164 14810:59 14821:101 14880:136 14980:60 15688:66 15803:88 16160:33 16771:42 17077:31 17357:396 17753:329 17799:438 18190:163 18286:65 18376:105 19024:98 19100:71 19391:107 19699:159 19787:63 +268:59 527:66 1315:183 2015:145 2062:74 2112:105 2185:89 2339:151 2414:143 2867:122 2927:104 3270:106 3361:59 3373:89 3436:341 3549:88 4453:115 4460:119 4469:141 4505:90 4526:85 4755:91 4995:236 5057:65 5298:41 5379:53 5783:154 5867:186 6431:51 6866:327 7003:28 7055:59 7494:236 7709:360 8150:226 8253:101 8416:116 8586:148 8667:86 8703:82 8804:197 9948:142 10024:157 10112:69 10241:189 10277:144 10431:138 10442:189 10543:611 10675:79 11272:126 11624:111 11828:51 12347:72 14067:69 14385:85 14851:64 14947:72 15201:68 15292:104 16029:45 16586:38 16805:143 16810:67 17104:68 18115:197 18479:102 18694:118 18700:83 18916:131 18964:129 19109:16 19282:48 19739:88 19786:51 +40:36 115:65 203:30 247:24 268:107 548:35 657:30 1040:7 1044:154 1070:26 1245:50 1279:37 1560:38 1640:55 1743:62 1798:43 1802:42 1824:113 1990:25 2194:28 2269:134 2290:82 2311:29 2339:23 2446:7 2455:59 2530:36 2630:208 2728:47 2739:73 2934:172 3296:26 3361:96 3402:41 3410:74 3515:20 3578:20 3582:40 3786:71 3860:52 3902:99 4412:32 4569:35 4594:45 4597:35 4797:55 4923:114 4976:45 5068:107 5131:29 5233:30 5249:58 5426:73 5551:25 5611:66 5874:29 5934:33 5959:20 6102:66 6234:76 6372:54 6374:140 6602:28 6629:44 6662:25 6676:50 6695:66 6866:341 6889:47 7061:91 7104:314 7189:113 7320:32 7363:40 7406:45 7475:42 7477:129 7669:78 7709:107 7725:179 7729:25 7748:74 7822:273 7850:25 7938:34 7953:62 8078:165 8262:27 8417:141 8554:55 8703:25 8737:51 8789:97 8804:113 8865:64 9019:66 9032:61 9180:84 9221:116 9318:63 9332:48 9631:92 9715:35 9755:122 9998:31 10276:34 10377:22 10394:136 10457:9 10470:35 10532:221 10582:518 10626:49 10971:72 11075:68 11106:109 11136:46 11181:151 11342:21 11380:50 11400:99 11426:66 11481:10 11518:25 11684:84 11936:132 11965:94 11969:40 12104:77 12116:41 12161:30 12448:16 
12605:39 12632:87 12798:98 12916:6 13067:47 13288:51 13337:32 13363:54 13435:37 13437:86 13504:192 13561:85 14161:10 14385:13 14556:20 14758:80 15226:33 15396:106 15430:99 15523:53 15600:50 15780:15 15793:50 15869:89 16029:64 16034:201 16213:68 16317:77 16344:82 16406:34 16432:52 16542:56 16580:47 16581:30 16584:20 16590:93 16631:114 16673:65 16789:65 16912:57 16913:110 17017:31 17079:30 17104:42 17319:42 17457:30 17471:84 17683:60 17711:28 17756:43 17771:38 17961:152 18050:21 18364:7 18503:48 18527:44 18544:47 18594:33 18633:142 18780:139 18869:63 19068:51 19094:112 19095:54 19109:71 19174:32 19201:55 19290:54 19327:76 19353:37 19430:65 19645:122 19812:115 19910:36 19973:53 +572:32 865:49 953:94 1040:36 1052:34 1071:46 1968:47 2136:148 2866:112 2894:53 2990:140 3507:124 3578:27 3689:201 3932:60 4308:155 4576:68 4732:75 4764:80 5323:103 5361:63 5466:147 6234:109 6306:13 6406:24 6759:95 6859:85 6864:22 6991:98 7129:64 7494:217 7709:241 7908:110 7965:66 8274:157 8374:93 8703:67 8774:48 8786:80 8945:75 8958:84 9207:83 9596:54 10004:282 10206:76 10241:109 10793:417 11224:66 11856:83 11871:276 12136:81 12355:50 12380:114 12448:42 12491:69 12610:78 12916:14 13226:100 14059:100 14067:56 14161:26 14548:114 14686:191 14730:71 14830:557 15071:116 15541:57 15586:78 15605:29 15678:93 15780:41 16223:93 16478:101 16488:95 16810:96 16878:88 16936:86 17523:83 17632:343 18038:161 18364:39 18456:177 18559:202 18594:175 18920:50 18922:62 19109:69 19386:130 19774:63 19805:82 19894:147 +520:147 626:269 862:74 1012:72 1052:126 1071:71 1914:200 2849:97 2858:110 2960:25 3123:118 3661:90 3682:73 3929:8 4415:85 4510:95 4957:130 5683:98 5783:73 6023:47 6047:157 6234:45 6285:132 6866:227 7709:89 8177:110 8548:171 9006:62 9379:325 9389:58 9529:45 10161:71 10244:282 10448:142 10532:193 10799:126 11181:83 11253:126 11263:102 11303:109 11871:272 11994:308 12499:37 12529:75 12610:143 12798:85 12916:77 13032:8 13143:78 13369:39 13726:452 13881:260 13897:175 13907:84 14177:172 14239:185 14665:156 14880:44 14946:78 14973:232 15103:14 15442:92 15777:30 15812:327 16286:70 16342:97 16465:113 16566:148 17069:158 17086:116 17220:130 17487:168 17520:57 17849:82 17855:59 18409:109 18740:61 19231:84 19255:110 19465:114 19553:114 19685:286 19966:129 +165:111 303:32 312:156 561:36 596:54 1040:7 1244:87 1427:33 1472:115 1565:100 1665:102 1743:30 1824:22 2062:44 2389:36 2645:72 2711:46 2720:63 2894:19 2976:68 3086:19 3132:160 3187:120 3361:66 3578:33 3882:24 3896:31 4025:103 4328:28 4526:25 4597:35 4614:18 4752:72 4768:107 5043:66 5305:23 5417:78 5532:114 5551:25 5877:23 5934:32 5945:26 5959:39 6154:63 6234:105 6272:90 6368:87 6662:25 6703:115 6780:94 6790:23 6865:24 6866:317 7055:35 7132:54 7494:222 7709:102 7739:20 7868:61 7908:82 7997:101 8460:178 8466:43 8632:40 8804:161 8934:151 8986:57 9056:24 9119:37 9179:63 9565:21 9614:50 9711:61 9923:82 9941:37 10115:187 10161:95 10241:86 10457:67 10532:55 10620:24 10663:152 10735:55 10799:150 10848:100 10957:95 10999:125 11215:47 11342:40 11380:98 11644:227 11822:19 12105:108 12319:20 12581:62 12687:40 12916:24 13233:101 13469:64 13567:57 13662:60 13738:32 13854:104 14161:28 14737:31 14927:49 15115:27 15174:127 15204:41 15262:168 15448:87 15512:42 15868:42 16006:24 16264:102 16609:39 16651:390 16829:144 16912:28 16950:51 17021:48 17044:65 17189:36 17203:123 17206:66 17264:174 17267:25 17942:456 18050:21 18153:89 18157:118 18198:48 18329:72 18516:328 18881:328 19041:126 19109:155 19210:58 19921:70 19973:26 19980:100 19990:89 +520:185 572:94 617:10 774:129 1052:39 1057:71 1268:24 1676:66 
1810:64 2025:97 2332:93 2413:164 2990:57 3084:40 3242:137 3578:20 4075:65 4308:265 4795:138 5128:61 5938:145 5959:62 6060:97 6374:61 6813:104 6859:96 6866:472 6927:81 7177:104 7494:116 7709:216 7850:75 8203:109 8548:53 8804:72 8925:79 9300:21 9565:49 10035:138 10161:110 10188:73 10206:172 10520:117 10635:165 10848:91 11007:131 11164:73 11871:84 12610:88 12916:18 13006:64 13639:165 13863:93 13881:77 13929:35 13999:6 14015:66 14161:59 14258:172 14699:110 15019:89 15231:43 15659:71 15875:59 16019:39 16312:312 16584:62 16810:173 16882:83 16995:79 17138:131 17160:32 17362:75 17399:94 17876:729 17882:139 17961:108 18364:22 19109:198 19247:68 19278:53 19522:54 19774:144 19932:114 +247:50 349:86 1040:14 1057:106 1260:94 1316:130 1990:52 2528:41 2693:39 2890:124 2990:155 3578:21 3776:75 4080:36 4113:89 4382:139 4576:54 4699:73 5289:468 5627:70 5991:150 6124:83 6221:74 6234:208 6347:167 6866:89 7055:38 7252:143 7444:8 9446:39 9500:15 9585:43 9936:143 10532:187 10590:150 10650:67 10848:163 11446:97 11668:109 11864:104 11871:88 12157:60 12448:33 12610:91 12675:4 12870:62 12916:9 13165:103 13190:532 13469:36 13829:91 13891:71 13929:48 13964:52 14161:21 14572:80 14851:41 14880:34 15102:87 15223:104 15376:83 15777:136 15798:107 16373:71 16478:152 16810:113 17657:77 17795:143 17922:53 18372:153 18911:25 19053:99 19085:86 19386:26 19430:548 +580:111 953:86 1040:110 1835:150 2290:89 2325:185 2536:92 2857:90 2902:95 3174:91 3578:11 3736:108 4489:147 4724:169 5077:136 5507:177 5566:71 5815:113 6234:118 6632:85 6705:114 6865:80 6866:247 7088:114 7392:70 7395:57 7709:127 8078:195 8215:78 8786:583 8903:44 9782:128 9860:282 10241:107 10532:300 10600:462 10932:122 11205:156 11350:115 11518:81 11642:56 11709:57 11871:70 11943:71 12007:130 12130:61 12206:48 12319:67 12547:109 12798:160 12882:164 12916:45 13140:195 13498:49 13658:75 13870:141 13881:55 14047:57 14158:81 14161:95 14820:117 14848:80 15291:51 15785:85 15896:72 16124:84 16239:98 16306:76 16403:23 16464:89 16524:105 16810:30 17752:83 17805:132 17939:156 18115:126 18435:127 18599:77 18779:134 19174:103 19293:118 19327:61 19390:16 19707:30 19923:91 +80:88 989:64 1052:39 1059:78 1549:79 1638:66 2172:74 2621:69 2751:57 2867:113 3121:79 3578:20 3882:152 3976:120 4723:69 5032:91 5128:61 5466:88 5724:123 6089:112 6234:147 6409:86 6866:261 7494:238 7510:56 7584:72 7709:352 8489:83 8913:82 10154:3 10161:109 10441:206 10530:92 10532:173 10773:94 10814:484 10853:216 11052:113 11106:61 11264:49 11342:63 11822:60 12064:99 12136:91 12320:132 12607:217 12610:88 12781:86 12916:13 13639:164 13741:46 13854:32 13878:137 13881:111 13967:118 14161:29 14310:100 14357:34 14682:92 14947:51 15359:137 15777:98 15798:77 15853:193 16312:621 16478:79 16581:91 17060:72 17249:31 17267:77 18072:117 18110:46 18364:22 19243:180 19327:57 19634:112 +748:93 1548:14 2062:57 2182:90 2934:93 2971:33 2990:18 3051:157 3307:96 3412:64 3578:8 3664:81 3988:75 4085:56 4346:178 4637:236 4684:103 4857:168 5417:124 5702:126 6021:79 6067:458 6227:113 6234:115 6790:181 6866:268 7182:90 7443:110 7494:89 7518:104 7630:51 7709:484 7755:67 8548:44 8632:628 8789:100 8799:104 8840:94 9056:63 9766:34 9800:6 9987:48 10128:38 10157:360 10241:77 10443:12 10575:82 10744:67 10848:45 10913:86 11011:116 11106:21 11161:91 11245:50 11989:30 12238:175 12355:49 12916:26 13234:90 13355:33 13606:108 13881:112 13982:67 14569:62 14952:60 14973:178 15204:66 15777:144 15793:63 16034:190 16296:36 16398:100 16881:68 17173:47 18295:151 18397:131 18723:93 18800:152 19109:143 19210:50 +173:108 956:229 1040:23 1052:43 2213:108 2751:85 
2851:107 3170:188 3578:11 3586:121 4376:74 4455:165 4614:123 5016:387 6153:371 6268:154 6396:251 6866:416 7494:22 7510:108 7709:341 7819:123 7968:98 8078:458 8751:59 8772:104 8804:81 8821:30 9497:107 9565:105 9683:138 10003:186 10161:27 10516:88 10744:179 10789:157 10799:106 10936:128 11164:47 11211:217 11303:91 11651:68 11668:87 11672:19 12206:108 12916:3 12917:90 13186:44 13829:80 14019:129 14023:99 14245:427 14404:73 14779:106 14844:94 15294:74 15747:26 15785:42 15798:86 16449:74 16461:55 17745:9 18010:154 18160:89 18364:24 18728:138 18964:83 18981:82 19204:80 19255:158 19787:84 +825:65 830:288 989:151 1040:35 1120:67 1785:127 1880:95 2145:13 2751:116 2934:151 3578:17 3720:493 3976:98 4554:126 4623:215 4786:55 5041:122 5361:120 5579:158 6234:136 6424:65 6546:328 6577:55 6592:41 6607:63 6666:117 6866:263 7262:71 7709:113 8151:39 8548:89 8799:103 8975:73 8985:45 9800:64 9900:117 10043:132 10731:295 10735:97 11126:128 11245:99 11546:56 11871:401 12008:155 12040:149 12448:80 12574:32 12916:9 13085:99 13143:54 13685:127 13881:142 13929:117 14167:131 14634:282 15294:112 16353:182 16417:71 16478:125 16682:114 17365:101 18118:24 18182:148 18229:293 18364:37 18623:74 18673:146 18916:23 19386:62 19443:275 19538:53 19738:72 19846:59 19880:150 19889:148 19999:93 +1044:81 1792:570 1967:40 2092:128 2347:50 2402:315 2593:106 2751:248 2990:84 4090:110 4820:99 5466:71 5874:251 6234:130 6311:64 6662:223 6866:302 7004:59 7494:174 7709:352 9734:134 9824:267 10161:51 10218:266 10241:143 10532:104 10744:232 10753:187 10799:75 11245:104 11763:99 11871:130 12173:48 12191:151 12429:128 12703:124 12878:99 13384:143 13829:38 14067:183 14194:106 14382:411 14416:91 15523:92 15901:43 16286:82 16706:116 16912:251 16914:82 17528:41 17806:64 18508:93 18916:91 +94:177 101:78 265:270 1284:39 1421:140 1560:148 1593:139 1878:89 1889:75 2621:62 3018:53 3131:239 3307:159 3578:13 4619:606 4830:227 5109:113 5125:133 5776:42 5821:134 6866:314 7363:235 7412:115 7494:90 7617:116 7709:334 8043:121 8349:138 8631:133 8804:136 8854:80 8871:39 8905:139 8943:195 9068:69 9671:115 9715:273 9971:152 10083:70 10292:51 10532:144 10843:82 10999:152 11668:100 11946:71 12238:51 12398:178 12682:102 12704:168 12968:92 13209:92 13210:75 13685:96 13929:45 14221:156 14332:102 14458:118 14474:70 14486:91 14682:69 15798:99 15886:63 16017:105 16826:89 17069:108 17371:152 17537:43 18778:214 18860:66 18886:179 19138:121 19247:113 19319:67 19411:110 19707:159 +247:75 732:59 832:66 1040:21 1052:80 1739:166 1978:138 2413:170 2480:5 2894:63 2915:457 2990:33 3372:70 3578:42 3715:218 3804:64 4308:91 4472:106 4494:69 5128:253 5154:142 5323:121 5743:369 5946:108 6234:49 6374:63 6449:85 6837:90 6859:99 6866:485 6915:62 7080:164 7363:63 7494:241 7673:124 7709:150 8228:60 8297:96 8361:129 8548:109 8559:466 8789:130 9059:43 9140:122 9403:83 9636:69 10241:76 10520:121 10532:90 10848:13 10998:92 11187:12 12448:49 12599:123 12610:46 12916:22 13006:131 13493:140 13685:78 13881:54 14027:67 14161:61 15198:115 15204:124 15294:69 15925:129 16342:61 16478:61 16555:95 16584:64 16659:134 16679:83 16701:51 16995:82 17145:80 17884:115 18110:109 18181:218 18364:23 18421:130 18438:85 18768:91 18800:73 18906:86 19455:93 19774:74 19926:188 19932:60 +80:110 101:38 202:106 251:42 654:125 657:57 891:167 939:83 1070:49 1155:1 1312:3 1324:53 1481:63 1569:102 1889:49 1986:147 2025:122 2455:222 2737:105 2990:81 3045:80 3123:105 3307:12 3578:6 3860:99 3988:56 4191:192 4263:121 4701:203 4722:66 5007:192 5010:97 5178:57 5639:73 5690:58 5736:64 5877:45 5945:100 6234:108 6274:96 6347:43 
6602:53 6695:62 6866:319 6974:93 7055:34 7091:125 7129:62 7305:357 7494:221 7609:136 7682:104 7709:215 8214:65 8262:51 8556:99 8615:223 8703:47 8738:77 8817:54 8925:55 9497:76 9954:116 10161:188 10241:113 10329:77 10396:46 10428:89 10925:53 10936:71 11014:115 11309:27 11704:56 11871:46 12007:88 12463:41 12610:27 12674:55 12704:59 12916:3 13288:48 13309:140 13470:82 13638:172 13885:62 14067:39 14161:18 14168:15 14278:540 14404:41 14489:11 15204:8 15294:41 15523:74 15524:70 15644:95 15726:67 15777:138 15847:62 15930:67 16995:98 17267:96 17519:37 17808:72 18416:211 18813:61 18855:96 19122:91 19674:83 19715:95 19739:50 19787:34 19973:201 +9:64 137:64 215:95 251:29 732:48 1273:32 1282:152 1481:43 1565:66 1931:190 1957:92 1987:62 1990:32 2062:29 2317:49 2349:244 2358:174 2448:64 2516:60 2820:35 2934:56 3006:43 3308:78 3345:55 3443:56 3515:79 3578:26 3582:51 3860:34 4223:126 4259:44 4342:182 4383:136 4597:46 4612:66 4614:23 4642:137 4857:86 5021:54 5057:51 5816:80 5945:68 5977:39 6153:28 6197:64 6229:146 6234:309 6251:51 6449:35 6549:38 6837:37 6866:392 6922:77 7055:47 7060:91 7248:44 7709:46 7926:53 8160:75 8196:60 8214:44 8374:45 8471:60 8548:22 8586:58 8634:75 8789:107 8913:70 9043:37 9127:207 9276:80 9446:48 9661:44 9683:86 9711:26 9824:39 9939:121 10173:122 10245:70 10340:52 10377:87 10457:11 10532:129 10535:76 10541:49 10620:96 10799:98 10848:156 10875:58 11090:173 11224:32 11296:133 11591:247 11642:54 11871:109 11993:176 12022:42 12196:94 12445:67 12448:20 12677:103 12798:67 12804:252 12916:12 12968:69 13288:33 13337:206 13402:72 13435:47 13576:82 13864:76 13891:43 14067:27 14119:46 14161:25 14222:34 14556:26 14574:50 14758:257 14794:116 14926:60 15178:52 15294:56 15330:44 15495:172 15780:257 15785:61 15793:64 16034:90 16158:104 16432:67 16464:35 16478:85 16534:85 16584:52 16713:48 16760:108 16978:55 16983:57 17279:30 17289:61 17318:40 17369:179 17430:220 17582:108 17818:86 17850:52 17970:29 18163:79 18413:89 18503:62 18623:20 18688:36 18944:80 19109:109 19174:41 19309:37 19674:57 19721:87 19778:50 19909:52 19935:42 +87:554 94:146 268:58 596:45 616:28 1273:72 1304:102 1313:100 1348:61 1615:59 1824:110 1916:257 1968:28 2062:37 2075:65 2185:120 2361:72 2902:235 3086:32 3291:47 3433:236 3467:97 3578:32 3661:30 3803:302 3957:78 4153:100 4273:60 4394:78 4724:45 4831:80 5057:32 5233:98 5417:124 5551:41 5959:33 6066:64 6078:93 6099:59 6374:162 6378:193 6866:735 7205:97 7223:42 7355:79 7583:41 7709:256 7739:66 7782:57 8099:37 8262:43 8282:20 8324:102 8821:131 9614:42 9763:83 9960:67 10035:131 10218:49 10532:182 10735:31 10778:123 10848:138 10872:103 11277:71 11317:93 11342:168 11380:162 11422:63 11685:62 11871:60 12320:93 12380:42 12417:46 12448:25 12560:173 12599:43 12781:74 12916:8 13521:41 13710:130 13881:91 14308:112 14310:118 14384:57 14733:46 14848:10 14973:38 15292:102 15303:40 15433:190 15772:45 16112:56 16571:70 16584:66 16810:117 17017:101 17417:55 17471:90 17845:112 18306:70 18350:44 18372:136 18637:40 18711:245 18813:52 19109:152 19293:83 19399:68 19729:70 +118:56 247:81 1005:135 1040:58 1052:43 1424:99 1619:153 2268:98 2528:3 2658:110 2894:68 2934:51 3578:17 4308:49 4494:104 4573:58 4749:92 4926:95 4957:6 5379:54 5690:24 5956:128 6175:95 6374:34 6866:113 7666:90 7709:173 7893:50 7908:133 7967:117 8330:103 8548:59 8600:114 8759:38 8821:220 8825:71 9075:857 9180:71 9690:133 10206:144 10532:67 10799:88 10998:49 11245:119 11400:122 11871:27 12238:68 12523:99 12529:147 12610:74 12916:25 12935:35 13006:71 13384:72 13722:326 13907:57 13929:39 14161:33 14295:113 14682:98 14880:58 15303:42 
15492:35 15674:137 15901:122 16347:62 16389:13 16802:178 16905:96 16995:44 17328:66 17441:68 17818:107 17841:66 17899:92 18364:37 18476:71 18728:17 18964:96 19109:139 19282:106 +203:59 229:111 576:90 820:127 1408:16 1555:137 1831:140 1889:7 1978:146 2185:72 2414:101 2425:197 2871:333 2934:125 2948:30 3578:7 3616:491 3641:207 3754:86 3932:89 4460:72 4502:90 5043:67 5049:158 5959:40 6153:43 6234:88 6339:90 6543:50 6712:192 6726:52 6866:329 6957:232 6959:56 7064:125 7398:126 7494:77 7709:14 7791:91 7872:184 7892:198 8789:94 8983:131 9279:104 9800:42 9960:182 10118:112 10161:71 10241:90 10422:348 10443:68 10532:95 10554:109 10744:52 10848:52 11090:66 11142:58 11235:86 11402:188 11822:39 11871:133 11910:23 12406:177 12448:31 12542:25 13063:59 13384:40 13829:111 13881:203 14186:114 14207:81 14313:137 14807:95 16034:96 16112:69 17679:263 17857:88 17970:44 18880:82 19196:112 19255:51 19589:141 19756:160 19778:78 19953:87 +413:88 520:112 596:66 625:95 657:72 863:95 1316:64 1436:59 1824:108 1951:59 2176:132 2715:107 2720:104 2874:101 2990:110 3084:84 3296:61 3433:174 3578:24 3976:96 4526:62 4579:94 4614:87 5454:97 5460:38 5612:74 5783:37 5874:205 5959:48 6027:14 6181:93 6236:93 6549:70 6731:113 6866:358 6868:104 7055:43 7363:48 7494:364 7510:89 7709:191 7729:60 7739:49 8037:99 8466:318 8707:78 9253:88 9971:5 10457:21 10510:80 10532:172 10735:45 10799:44 10848:208 11342:99 11711:102 11958:86 12173:113 12196:87 12238:95 12319:49 12601:113 12766:58 12915:131 12916:14 13645:85 13829:75 14404:104 14503:103 15080:92 15448:66 16094:172 17061:144 17104:50 17272:133 17394:93 17782:32 17798:103 17942:609 18364:17 18768:69 18800:106 19065:83 19109:30 19174:38 19210:48 19291:15 19528:110 19678:416 19809:60 19973:191 +219:193 287:115 415:49 1029:489 1040:16 1052:31 1057:64 1841:131 2145:93 2160:75 2709:120 2751:62 2934:255 3578:8 3661:58 3713:77 3876:78 4427:91 4831:62 5351:31 5361:56 5417:97 5625:90 5776:26 6234:86 6341:81 6424:80 6642:92 6660:120 6865:59 6866:365 7494:123 7539:608 7709:17 8141:43 8361:22 8755:158 9055:101 9128:124 9213:85 9334:56 9839:32 10043:90 10112:46 10443:227 10532:192 10579:117 10750:155 11116:108 11435:203 11668:62 11859:111 11869:57 11871:142 12472:114 12916:4 13174:91 13210:101 13685:59 13709:335 14638:96 15044:350 15359:77 15447:160 15492:50 15867:65 15901:108 16059:56 16081:90 16192:82 17017:150 17584:112 18115:70 18299:69 18306:57 18364:17 18410:85 18894:73 18964:48 19157:64 19174:38 19947:231 +112:71 1040:21 1044:146 1052:13 1413:110 1450:79 1472:98 1825:44 2054:112 2078:46 2133:551 2240:72 2376:42 2518:78 3237:33 3443:45 3578:17 3625:523 3713:33 4071:75 4328:30 4576:53 4614:19 4835:55 5305:50 5538:38 5584:40 5900:40 6274:26 6494:57 6571:144 6759:52 6866:275 7005:5 7051:67 7322:62 7494:282 7709:243 8285:106 8548:18 8666:70 8804:146 8945:29 9445:36 9596:21 9772:72 10321:38 10343:45 10449:109 10457:9 10620:26 10999:83 11287:74 11668:27 11685:38 11752:49 11769:60 11864:154 12198:49 12610:30 12916:10 12968:145 13384:80 13685:26 13857:99 13881:193 13884:89 13891:35 13929:12 14161:10 14492:524 14809:100 15294:45 15686:56 15780:16 15911:100 16211:41 16478:79 16496:36 16817:40 17017:65 17248:97 17572:90 17896:63 18115:170 18364:38 18372:14 18508:131 18768:30 18779:14 18794:109 18894:63 19095:28 19210:21 19386:38 19787:134 19822:147 +626:218 755:108 826:61 944:151 1120:104 1205:139 1479:189 1968:140 2036:296 2305:133 2407:47 2616:22 3818:92 4376:190 4545:102 4906:84 5304:75 5848:58 6372:93 6866:463 7049:96 7494:93 7709:121 8607:41 8720:86 8923:63 9734:84 9995:157 10194:19 10244:456 
10457:35 10532:22 10848:134 10986:98 11053:97 11330:96 11642:126 11695:86 11805:67 11871:133 12170:678 12238:71 12674:40 12798:99 12916:132 12918:115 13224:13 13515:147 13728:105 13881:61 13929:46 14081:60 14164:51 14239:75 14258:126 14896:159 15605:79 15785:43 15840:95 15948:231 16568:57 16905:86 17094:80 17487:272 17821:131 17822:50 18115:69 18175:125 18695:139 18933:101 19217:80 19255:296 19685:115 +80:147 247:121 1316:92 1481:85 1767:67 2339:58 2764:115 2934:193 2993:5 3237:79 3337:161 3433:46 3578:25 3643:97 3713:81 3965:510 4113:99 4205:206 4319:132 4557:38 4569:176 4576:65 4707:57 4792:133 5354:67 5589:270 6060:99 6350:19 6865:124 6866:281 7086:140 7372:76 7483:322 7494:137 7560:78 7709:97 7924:145 8164:110 8410:106 8469:235 8490:101 8548:88 8804:136 8813:63 8948:83 9456:92 9776:172 10035:134 10276:130 10443:42 10532:206 10848:138 11006:63 11032:80 11308:97 11588:295 11765:73 12607:60 12610:73 12795:167 12916:15 13529:20 13685:63 13781:77 13898:79 13929:58 14161:25 14202:140 14375:61 14385:59 14556:52 14729:156 14785:163 14969:118 15438:27 15519:35 15705:37 15748:134 15798:129 15933:89 16043:72 16293:95 16638:94 16682:93 17015:102 17102:69 17279:120 17328:99 17428:102 18115:102 18432:83 18792:84 18800:81 19049:99 19153:46 19278:44 19327:144 19904:80 19926:83 +81:152 184:114 228:168 319:73 413:50 502:58 520:115 635:533 659:87 1040:112 1086:88 1299:126 1427:47 1523:104 1549:36 1565:351 1699:33 1951:34 1952:308 2269:15 2602:54 2894:27 2990:277 3086:27 3578:23 4191:93 4259:47 4540:73 4569:48 4603:46 4707:31 4831:58 4882:59 4976:62 5151:97 5361:32 5425:75 5446:54 5874:39 6007:66 6130:80 6153:30 6181:53 6234:129 6274:69 6444:69 6554:45 6602:77 6866:215 6868:74 6952:48 7257:42 7338:194 7363:55 7494:170 7536:55 7709:122 7739:28 7836:169 7896:86 8061:51 8217:114 8628:196 8847:136 9059:142 9382:50 9987:104 10137:95 10161:280 10229:47 10238:137 10316:57 10443:62 10445:69 10457:48 10532:97 10535:40 10564:118 10620:34 10725:65 10841:76 10923:52 10999:87 11147:62 11292:86 11317:85 11356:196 11620:164 11871:186 11892:87 11952:66 12366:85 12400:49 12484:62 12916:55 12935:28 13118:77 13143:114 13529:54 13829:84 13938:82 14158:103 14697:32 14873:40 14880:56 14993:124 15000:40 15292:43 15398:84 15934:70 16157:46 16159:52 16257:86 16260:102 16478:110 16654:102 16705:50 16761:65 16881:37 16913:38 17173:51 17267:35 17947:122 17949:86 17952:50 17970:31 18050:29 18109:115 18199:60 18227:100 18263:50 18308:74 18769:34 18792:90 19210:55 19255:154 19278:24 19384:53 19429:49 19779:83 19809:69 19826:62 19894:125 19926:88 +67:82 520:93 626:124 833:107 953:50 1691:110 2142:104 2990:80 3578:15 4195:82 4383:322 4698:198 4826:143 5084:73 5937:37 5953:84 6234:142 6319:188 6372:138 6866:394 6900:326 7262:148 7372:94 7443:200 7494:368 7509:85 7709:64 7836:37 8433:56 8515:69 9328:150 9709:280 9763:2 9766:66 10035:103 10161:114 10443:53 10532:151 10986:91 10999:102 11083:34 11267:53 11769:60 11997:105 12238:4 12798:116 12916:12 12968:113 13441:55 13465:46 13881:99 14703:423 15226:152 15780:140 15793:114 15931:102 16701:56 16851:79 16853:567 17380:27 17738:16 18397:109 19386:55 19557:87 +164:112 260:49 626:322 1040:213 1052:100 1057:43 2990:156 4514:113 4820:100 5132:89 6125:80 6866:91 7271:66 7494:122 7709:222 8925:73 9766:76 10241:96 10244:674 10443:118 10457:68 10532:117 10561:2 10777:86 10821:101 10848:122 10964:73 11050:50 11220:52 11330:189 11400:86 11871:346 12173:118 12320:122 12406:64 12529:234 12916:157 13731:99 13784:89 13881:127 13958:48 14239:148 14325:60 15017:82 15747:85 15888:116 16124:47 16810:82 
16974:327 17487:268 17782:41 18281:57 18301:52 18364:57 18779:16 18860:89 19109:36 19138:55 19309:16 19546:124 19628:118 19685:341 +559:75 626:313 1040:272 1052:73 2136:84 3025:152 3976:31 4962:104 5057:114 6027:45 6124:137 6866:413 7206:11 7494:133 7709:106 7836:70 8228:7 8297:101 8433:104 8789:113 8804:210 9109:94 10027:150 10244:573 10848:112 11070:79 11325:64 11362:79 11400:54 11685:187 11871:161 12916:134 12968:126 12992:180 13285:205 13628:101 14239:108 15238:115 15948:166 16330:130 16515:39 16974:238 17245:45 17365:104 17374:106 17487:293 18599:90 19034:71 19516:380 19594:68 19685:249 +651:83 710:94 1052:82 1359:41 1833:54 2463:72 2560:61 2934:64 3150:105 3213:86 3307:88 3361:78 3578:32 3698:68 4173:146 4258:163 4349:79 4395:35 4427:65 4914:162 5000:88 5041:76 5057:192 5532:40 5890:102 5910:238 6865:158 6866:458 7051:104 7287:134 7494:102 7709:242 7729:81 7743:105 8007:138 8099:74 8370:374 8804:79 8843:163 8913:88 9033:120 9160:125 9776:109 10161:165 10457:28 10532:259 10971:135 11409:277 11658:156 11668:83 11685:210 11822:63 11871:222 11966:451 13050:129 13685:159 13881:105 14222:85 14800:124 14844:179 14896:29 15050:103 15549:66 15780:50 15798:82 15885:63 15887:8 16006:80 16248:111 16624:54 16632:55 16997:77 17005:54 17138:139 17343:135 17417:72 18115:79 18160:84 19174:51 19187:81 19386:39 19618:230 19873:100 +203:59 257:84 407:110 492:118 520:129 651:117 1040:13 1052:25 1057:96 1363:95 1521:85 1810:88 1968:69 2079:141 2878:78 2990:121 3084:99 3119:760 3187:49 3469:39 3578:20 3746:239 3774:165 3860:52 4313:96 4376:68 4443:126 4526:155 4614:108 4764:84 4957:225 5253:62 5305:48 5609:128 6234:117 6744:141 6866:259 7118:70 7494:139 7709:210 8152:41 8261:242 8349:70 8703:50 8722:80 8801:101 8943:50 9383:97 9987:188 10154:98 10316:83 10428:68 10604:147 10628:49 10848:68 11515:68 11871:132 12118:34 12133:76 12161:118 12674:95 12755:115 12858:78 12916:5 13089:35 13143:181 13281:107 13685:49 14047:81 14269:120 14404:43 14555:110 14574:155 14947:142 15572:32 15780:31 15811:96 16773:130 16793:149 17857:62 18160:52 18364:14 18694:71 18728:45 18779:140 18916:147 19133:76 19386:24 19658:106 19787:71 19935:64 +520:91 989:121 1165:29 2334:73 2638:55 2819:240 2823:56 2894:335 2934:92 3051:14 3084:182 3156:201 3264:141 3361:101 3578:28 3767:89 4385:176 4554:103 5170:147 6234:214 6865:103 6866:173 6955:138 7066:123 7320:135 7494:128 7709:100 7870:265 7908:74 7944:43 8362:290 8720:140 9314:103 9668:242 10521:76 10532:169 10848:138 11422:163 11529:76 12007:87 12264:142 12348:211 12380:84 12466:448 12499:193 12540:80 12606:65 12610:61 12916:15 13171:183 13529:79 13560:126 13574:147 13881:75 13896:333 14792:171 14851:82 15294:91 15448:100 15470:81 15681:143 15732:45 15844:96 16029:136 16159:158 16197:103 16296:30 16513:89 17105:153 17626:84 17821:81 18007:113 18364:30 19282:102 19386:51 19926:55 19999:148 +293:56 422:74 936:114 998:90 1040:104 1057:128 1378:87 1691:147 2403:103 2934:95 3154:99 3976:97 4226:108 4473:138 4980:112 5305:369 5370:89 5547:59 6234:182 6532:100 6696:99 6866:130 7494:58 7709:30 8789:31 8895:740 9525:122 10054:49 10626:73 11518:383 11871:197 12021:97 12238:123 12529:31 12798:116 12916:13 13378:157 13411:71 13428:118 13685:380 13881:58 14296:60 14385:39 14416:54 15523:56 16511:122 16716:161 16826:29 17091:103 17242:5 17327:30 18364:111 18800:23 19080:135 19546:24 +247:201 270:27 520:121 1040:57 1044:92 1052:36 1436:50 1889:241 1912:3 2362:30 2555:60 2751:71 2858:116 2934:102 2990:130 3096:75 3122:67 3307:49 3308:57 3578:28 3604:64 4021:65 4427:72 4576:72 4957:122 5081:68 5178:84 
5361:66 5625:104 5727:706 6153:368 6449:76 6866:172 7051:181 7271:79 7680:58 7709:167 7780:200 7872:141 8099:65 8297:63 8475:79 8488:69 8710:103 8799:80 9109:101 9407:158 9446:52 9600:40 10264:108 10457:24 10708:87 10848:81 10933:69 11006:70 11724:7 11871:166 11913:91 12191:51 12448:88 12463:56 12610:122 12916:10 13378:11 13594:154 13685:69 13829:115 13929:32 14161:27 14347:94 15069:144 15372:63 15492:116 15523:127 15747:18 15780:43 16084:107 16810:119 17145:26 17369:292 17727:52 17869:109 17896:36 18115:129 18160:74 18282:58 18500:86 19085:114 19095:76 19174:44 19552:117 +448:101 492:70 732:59 1070:81 1152:97 1273:57 1395:105 1521:135 1743:98 1897:138 2025:114 2136:66 2171:81 2185:129 2239:107 3578:31 4576:80 5128:251 5305:75 5368:460 5869:109 6234:84 6285:143 6315:97 6866:368 7206:58 7494:10 7560:97 7709:15 7836:121 8320:100 8488:164 8685:42 8842:43 9241:161 9989:85 10177:116 10276:107 10292:28 10340:127 10443:245 10532:170 10705:312 10999:116 11085:130 11447:84 11588:91 11663:84 11668:81 12821:82 12868:157 12916:8 13380:109 13402:2 13881:40 13891:106 13929:36 14272:62 14447:223 14638:105 14743:92 14848:109 14899:73 14987:109 15492:130 15796:39 15798:159 16302:73 16724:118 16864:95 16947:118 17011:189 17017:97 17267:79 17270:97 17272:262 17326:121 17369:109 17506:148 18115:50 18372:128 18594:102 18676:163 19287:124 19386:76 19823:391 19926:290 +80:117 356:136 732:38 967:88 1040:27 1057:90 1125:84 1715:194 1743:126 1968:176 2398:87 2597:125 2927:64 2934:63 2968:197 3122:121 3242:91 3307:85 3308:41 3381:106 3537:132 3578:34 3713:64 3860:53 4200:57 4294:101 4308:58 4521:69 4526:52 4571:52 4576:51 4755:168 5231:65 5428:85 5803:273 5821:85 5937:19 5965:98 6685:77 6866:574 7004:41 7097:80 7271:126 7363:40 7560:62 7709:199 7729:50 7893:59 8078:30 8079:73 8144:54 8772:91 9080:136 9402:112 9549:44 9674:90 9860:58 10173:95 10206:57 10256:82 10276:103 10394:123 10417:101 10457:35 10510:144 10532:171 10789:93 10925:113 11224:99 11647:74 12120:88 12136:121 12238:66 12270:57 12380:63 12540:37 12610:146 12644:106 12674:64 12708:28 12777:85 12916:26 13310:119 13381:79 13453:123 13504:28 13709:93 13829:92 13846:128 13881:141 14161:59 14771:20 14851:39 14979:103 15303:50 15737:117 15780:62 15806:82 15879:170 16215:506 16584:164 16873:31 17045:118 17104:293 17267:153 17332:79 17457:61 18454:42 19174:32 19327:38 19386:49 19524:127 19787:127 +137:103 251:126 413:151 567:101 660:120 891:60 1014:105 1057:126 1173:236 1555:142 1565:52 1943:151 1981:62 1987:148 2042:98 2728:96 2820:55 2897:62 3213:40 3308:42 3433:37 3436:107 3483:68 3578:14 3713:65 3830:133 3882:154 3932:139 4113:76 4247:85 4472:69 4554:50 4674:92 4722:215 4754:93 5057:41 5178:123 5401:107 5510:96 5867:58 5903:71 6111:79 6132:85 6172:76 6234:203 6383:98 6866:473 7055:37 7087:102 7164:40 7709:18 7746:130 7836:87 7846:88 7893:60 8337:85 8666:69 8789:91 8925:90 9248:117 9421:119 9424:54 9631:190 9695:205 10184:123 10241:88 10277:90 10338:182 10377:46 10457:36 10532:156 10620:51 10623:97 10848:35 11168:58 11245:40 11422:79 11752:96 12087:26 12319:42 12473:91 12529:92 12721:72 12798:48 12821:28 12916:4 13114:77 13428:64 13502:35 13653:76 13891:69 13988:81 14074:90 14122:123 14197:96 14293:71 14385:63 14532:79 14880:76 14899:40 15427:185 15529:70 15656:82 15780:63 15818:95 15848:78 16034:56 16094:74 16112:72 16567:201 16886:99 16997:195 17336:93 17430:88 17801:95 18115:92 18406:71 18672:82 19117:128 19247:97 19282:98 19572:101 19649:60 19924:322 19973:55 +203:104 303:344 626:48 732:65 1040:24 1044:76 1968:61 1998:69 2040:130 2194:98 2638:49 2707:162 
3176:141 3425:40 3578:23 3826:179 3976:111 4057:29 4426:39 4494:53 4707:78 4962:51 5131:103 5176:193 5538:128 5561:100 5636:27 5937:18 6096:36 6347:138 6379:99 6571:162 6662:89 6685:134 6866:115 7055:63 7441:163 7549:47 7709:142 7909:85 7947:112 8215:84 8298:105 8488:124 8844:161 9056:88 9662:80 9976:544 9987:66 11159:41 11229:154 11400:23 11631:106 11685:101 11822:69 12238:142 12278:220 12320:124 12448:55 12506:117 12529:123 12595:132 12610:51 12916:24 12968:131 13165:256 13305:133 13429:76 13570:135 13687:125 13929:40 14161:34 14880:100 15330:120 15355:85 15674:102 15957:47 16007:78 16211:274 16400:105 16682:61 17003:103 17711:99 17755:90 17841:98 17952:127 18364:51 18493:91 18700:131 18916:11 18944:35 19255:79 19386:85 19685:51 19921:62 +101:74 247:88 493:73 520:173 812:109 833:74 1950:60 2185:97 2610:88 2932:182 2990:97 3150:120 3349:575 3578:12 3847:137 4573:80 4576:94 5034:50 5321:46 5361:87 5466:159 5946:81 6164:256 6234:229 6406:109 6866:238 6927:196 7145:37 7287:123 7494:44 7599:113 7709:125 7819:89 8349:131 8450:221 8804:141 8925:100 9047:72 9276:75 9325:87 9676:75 10414:575 10532:298 10795:73 10899:104 11871:91 12238:288 12916:6 13143:77 13402:75 13685:182 13744:65 13907:80 14161:72 15640:196 15798:94 17152:114 17270:51 17343:55 17914:99 18160:194 18369:60 18921:4 19217:129 19255:83 +295:107 732:153 989:32 1057:69 1147:95 1395:136 1920:27 2751:129 2858:97 3265:496 3501:109 3578:27 3617:145 3976:66 4669:6 4751:145 5811:356 5890:130 6006:60 6372:90 6866:347 7257:93 7363:82 7494:226 7709:359 7739:84 7829:118 7872:102 8125:60 8347:138 8548:142 8666:138 8720:12 8759:97 8789:103 8967:177 9233:102 9290:95 9698:139 9763:66 10035:133 10161:126 10457:35 10515:127 10532:160 10753:26 10936:46 11014:250 11092:118 11181:96 11871:99 12029:145 12607:98 12732:205 12916:18 13024:129 13829:106 13929:47 14161:80 14212:128 14221:164 14556:84 14778:216 14896:43 14973:96 15674:63 15995:71 16029:111 16747:55 16810:15 17008:116 17173:77 17890:49 19109:78 19386:50 19660:89 19710:464 19787:15 19926:53 +101:79 174:44 203:23 237:51 268:210 404:34 548:27 560:67 585:50 659:199 732:88 891:23 958:60 991:40 1188:30 1273:168 1289:102 1313:48 1560:30 1765:90 1824:35 2015:52 2062:71 2239:62 2290:43 2502:38 2560:55 2746:31 2818:51 2850:56 2894:16 3164:52 3187:58 3280:30 3296:20 3449:29 3483:52 3515:32 3578:37 3582:62 3703:27 3711:33 3860:21 3882:20 3896:25 3949:29 4001:54 4161:36 4195:55 4259:27 4368:33 4412:51 4526:20 4637:18 4707:18 4710:104 4802:128 4882:67 4925:28 5057:16 5068:28 5077:33 5130:88 5176:43 5227:61 5233:24 5339:54 5355:58 5361:18 5362:38 5385:119 5489:41 5507:21 5551:39 5563:39 5596:24 5751:34 5821:135 5867:311 5945:21 6067:47 6084:25 6222:28 6234:140 6374:16 6433:61 6554:26 6662:40 6685:30 6790:37 6792:105 6866:402 6896:36 6947:44 7055:28 7183:150 7250:160 7278:47 7363:16 7406:35 7478:50 7494:20 7592:29 7709:98 7746:25 7826:61 7877:32 8034:34 8078:47 8079:57 8091:89 8123:122 8150:377 8194:30 8215:19 8218:25 8293:38 8475:84 8488:124 8548:14 8645:144 8759:107 8778:31 8807:47 8986:46 9166:21 9261:32 9614:20 9661:53 9707:31 9736:29 9739:85 9751:82 9987:15 9998:24 10112:55 10145:98 10241:80 10276:27 10339:37 10377:35 10441:27 10442:45 10457:34 10532:112 10541:30 10543:49 10723:109 10925:22 11036:55 11168:44 11328:39 11350:28 11377:11 11380:98 11552:47 11624:27 11695:24 11728:312 11871:177 11877:21 11968:29 11971:26 12036:39 12104:30 12170:30 12728:71 12916:16 12918:109 12935:16 12968:73 13006:16 13114:29 13151:29 13165:76 13194:70 13210:138 13258:24 13366:39 13490:48 13653:29 13765:33 13921:105 13929:9 
14158:20 14161:15 14167:20 14203:58 14260:94 14332:127 14334:40 14358:175 14501:43 14543:64 14606:28 14653:30 14666:44 14826:47 14851:91 14878:37 14885:53 14901:90 14973:18 15026:39 15115:21 15294:17 15301:29 15303:137 15330:54 15429:72 15492:32 15697:24 15747:29 15772:22 15785:44 15793:20 15813:114 15848:30 16084:60 16217:117 16232:37 16298:123 16325:22 16523:35 16552:26 16571:34 16795:64 16881:42 16912:67 16997:186 17083:41 17104:98 17173:73 17189:292 17267:20 17272:44 17277:26 17332:31 17369:82 17448:135 17471:44 17493:90 17563:99 17711:67 17784:23 17824:23 17970:18 18100:82 18115:75 18130:38 18214:42 18406:27 18474:128 18503:38 18688:22 18821:56 18990:43 19113:49 19174:12 19220:45 19354:39 19415:145 19473:56 19575:44 19596:44 19685:11 19739:21 19788:29 19809:20 19921:42 19985:55 +520:67 776:113 1040:24 1052:44 1057:88 1100:144 1110:39 1268:116 1521:450 1978:62 2538:61 2720:34 2736:86 3458:97 3578:12 3988:103 4493:149 4554:85 4704:77 5052:25 5305:84 5446:71 5449:113 6548:54 6866:215 7174:483 7287:141 7938:118 8408:63 8943:87 9126:138 9352:128 9734:78 10035:73 10112:120 10443:30 10509:88 10532:101 10925:98 11171:198 11280:123 11530:55 11695:98 11820:21 11871:72 12038:117 12054:64 12582:148 12644:184 12781:16 12916:18 12917:56 13143:25 13292:185 13685:86 13929:80 13963:211 14729:132 14871:41 15265:154 15429:107 15440:87 15523:103 15669:79 15681:133 15798:88 15891:126 16034:123 16286:73 16385:124 16457:65 16464:96 16817:134 17196:132 17537:56 17674:99 17904:118 17961:129 18041:69 18323:84 18364:50 18399:206 18742:471 19247:39 19799:48 +239:14 285:94 415:90 435:69 690:135 1012:107 1057:27 1502:82 2019:160 2295:67 2474:144 2789:77 2915:246 3051:112 3456:28 3578:26 3664:82 3932:57 4512:151 4524:73 4614:46 4914:256 5041:60 5162:64 5247:61 5446:82 5500:59 5607:96 5743:199 5747:177 5760:37 5863:84 6125:102 6866:58 6886:166 6955:84 7363:51 7494:138 7630:63 7709:248 7920:82 8081:10 8218:80 8996:122 9276:67 9485:118 9671:67 10043:170 10206:144 10394:52 10532:39 10799:72 10831:316 10841:282 11245:63 11822:150 12238:218 12435:497 12682:70 13384:110 13400:156 13881:23 14800:98 14843:99 14912:131 15204:86 15523:121 15640:135 15777:136 15780:39 16187:108 16302:156 16529:98 16584:52 16623:31 16711:91 17017:79 17045:75 17076:54 17523:79 18098:321 18109:176 18490:158 18623:75 18916:35 18964:7 19386:31 +118:130 151:187 626:164 1040:203 1052:76 1252:111 1268:24 1688:383 1914:121 2501:345 2751:62 2934:110 3086:118 3187:147 3614:138 4134:139 4723:136 4742:43 5142:56 5173:332 5946:166 5956:42 6234:116 6792:68 6866:331 7120:85 7260:39 7443:262 7494:200 7505:62 7709:147 7725:93 7848:57 8387:63 8433:38 9077:33 9734:109 9914:63 10035:74 10110:61 10161:102 10244:257 10255:19 10292:122 10359:71 10443:12 10799:135 10936:171 11087:29 11106:54 11171:54 11563:13 11746:62 11871:283 11994:187 12036:91 12238:140 12798:77 12916:88 13384:160 13737:168 13881:146 14239:112 14665:95 14851:117 15605:112 16162:234 16325:165 16432:156 16487:125 16504:77 16974:124 17326:55 17487:102 17939:198 18364:130 18838:93 18886:275 19109:35 19609:121 19685:87 19752:73 19926:100 +1040:37 1273:120 1671:67 1793:37 2524:146 2720:43 2746:106 2934:54 3307:17 3578:9 3650:72 4318:90 4554:66 4561:140 4614:49 4693:106 4831:53 5128:54 5574:141 5683:114 5776:56 5791:89 6027:84 6060:37 6234:48 6866:366 7048:144 7088:34 7625:504 7850:67 7911:135 8631:92 8786:162 8804:60 9331:90 10161:210 10241:82 10457:23 10532:54 10799:10 11002:62 11610:146 11871:196 12136:244 12183:210 12350:67 12529:105 12748:75 12916:9 13685:67 13721:89 14158:68 14638:125 14793:39 
14832:683 16584:110 16657:30 16851:126 16936:87 17169:100 17526:101 18241:15 18323:42 19108:62 19132:50 19247:73 19280:155 19386:33 +1124:160 2091:1 2244:66 2528:181 2990:117 3247:87 3295:81 3361:83 4411:143 4573:102 4614:132 4906:112 5128:146 5298:80 5446:131 5760:99 6102:95 6234:147 6406:95 6866:377 7709:124 7910:96 8211:60 8433:36 8903:97 9255:504 10187:98 10393:76 10514:374 10609:465 10626:58 10860:68 11106:36 11107:29 11245:37 11309:15 11336:6 11695:104 11871:64 11959:519 12469:84 12599:5 12604:128 13085:67 13378:87 13384:138 15134:112 15294:158 15378:87 15623:97 16029:95 16165:84 17961:76 18160:190 18447:94 19120:98 19255:70 19327:137 19582:90 19787:82 +48:53 348:126 985:54 1057:34 1067:75 1244:67 1821:54 1953:552 2253:166 2290:247 2934:65 3086:59 3187:148 3308:122 3433:109 3578:10 3711:380 3932:135 4614:55 5262:103 5431:76 5612:93 5783:163 5885:62 6382:59 6866:211 6973:99 7494:137 7709:162 7739:61 7979:166 8078:135 8277:73 8555:148 8707:390 9276:85 9448:148 10035:145 10161:87 10208:91 10457:26 10626:142 10640:106 10996:63 11085:108 11245:92 11764:5 12238:64 12582:56 12674:78 12704:54 12916:55 13982:118 14177:83 14316:136 14733:169 15017:68 15103:106 15780:46 15793:75 15931:94 16301:116 16325:165 16451:46 16810:168 16851:57 16962:76 17040:69 17079:90 17702:108 18050:64 18715:115 19095:81 19109:75 19144:90 19174:47 19272:114 19357:117 +520:32 1075:39 1617:64 1691:57 2025:81 2062:290 2176:99 2524:52 2751:99 3084:90 3361:112 4376:143 4489:138 4637:298 5175:116 5487:81 6306:67 6866:210 7129:127 7494:184 7709:367 9271:108 9566:130 9594:52 9614:328 9849:590 9907:76 10241:67 10532:186 10799:96 10848:71 11342:265 11871:341 11913:73 12582:194 12721:105 12786:18 12798:82 12916:44 13628:151 14015:35 14161:248 14638:87 15376:84 15427:385 15589:112 15780:196 16515:136 16706:106 16908:187 16919:58 17353:102 17818:162 18169:97 18306:55 19278:222 19849:62 +42:50 81:195 247:42 286:133 302:119 438:108 442:223 520:130 912:43 981:25 1040:6 1057:51 1228:274 1244:59 1249:114 1282:35 1313:27 1565:23 1739:70 1784:47 1798:75 1889:97 2003:51 2136:56 2398:38 2696:50 2894:18 2902:26 3150:29 3266:66 3283:118 3433:145 3490:111 3578:12 3746:187 3858:40 3893:87 4266:120 4288:43 4384:142 4438:92 4511:34 4574:41 4687:27 5057:35 5131:26 5166:33 5253:28 5305:21 5347:104 5412:70 5443:89 5869:31 5877:21 5913:61 5959:18 6018:668 6065:53 6269:40 6589:51 6606:134 6695:58 6837:50 6866:37 6876:40 6887:117 7004:116 7090:43 7182:63 7214:41 7336:63 7371:34 7432:102 7494:173 7514:32 7640:30 8079:32 8196:41 8320:18 8362:30 8504:69 8553:57 8593:72 8737:45 8821:29 9227:44 9473:154 9756:161 9757:27 9772:74 9791:46 9860:51 9989:24 9998:55 10009:33 10277:39 10319:55 10389:38 10446:27 10457:23 10532:142 10535:26 10541:34 10620:44 10640:31 10717:93 10735:17 10799:101 10848:108 11018:27 11518:22 11811:61 11871:63 12111:56 12268:69 12314:42 12319:18 12400:32 12448:28 12465:55 12475:43 12529:9 12831:178 12834:44 12916:4 13258:27 13337:28 13519:34 13604:82 13645:31 13679:92 13834:51 14161:17 14207:36 14332:130 14375:21 14501:24 14574:34 14752:122 14851:34 15014:77 15038:61 15094:117 15102:36 15164:90 15492:18 15780:27 15818:41 15908:56 15963:55 16006:22 16034:129 16050:45 16232:42 16552:59 16581:27 16705:32 16773:116 16810:69 16826:115 16995:23 17104:73 17233:39 17771:33 17970:20 17990:33 18032:44 18115:123 18160:23 18314:27 18364:6 18438:89 18539:95 18594:29 18722:113 18854:46 18916:15 18946:61 19001:79 19024:64 19061:57 19095:24 19123:55 19210:18 19255:18 19429:32 19480:58 19739:47 19778:35 19783:81 +907:72 1004:100 1131:91 1866:23 2932:70 
2990:73 3264:82 4123:117 5128:170 5219:94 5730:365 5946:21 6167:8 6227:92 6866:217 6882:74 7248:291 7449:89 7494:87 7709:234 7908:90 8361:141 8759:64 8956:39 9109:99 9751:98 10443:199 10532:108 10746:114 10848:82 11425:74 11524:84 11740:118 11871:266 12206:16 12238:111 13384:39 13565:543 13832:63 14283:179 14310:24 14323:46 15021:69 16029:67 16223:77 16330:87 16339:444 16724:319 16835:4 17222:82 17754:112 18372:128 18498:74 18944:135 19255:49 19261:344 19757:44 19774:200 +520:162 790:95 890:54 1022:444 1057:191 1541:137 1631:136 1889:170 2621:57 2660:20 2751:124 2810:146 2934:80 3043:28 3433:40 3578:15 3879:49 3882:55 3976:98 4011:104 4085:98 4494:57 4524:96 4526:57 5057:44 5644:87 6099:81 6355:110 6453:55 6500:30 6818:164 6866:604 7709:157 7770:125 8298:66 8548:38 8821:92 8943:110 9019:146 9359:71 9526:76 10112:54 10241:31 10390:131 10483:116 10508:51 10532:68 10579:81 10718:113 10735:42 10799:146 11270:65 11400:57 11845:158 12238:37 12319:46 12508:104 12916:15 13143:36 13685:55 13691:389 13818:34 14067:46 14404:48 14434:79 14697:51 14729:20 14762:70 14851:86 15427:67 15613:147 15798:56 16475:144 16478:89 16542:123 16913:121 17270:44 17279:52 17560:385 17753:83 17821:105 17939:130 17975:112 19139:161 19197:69 19255:43 19314:28 19386:27 19432:68 19634:82 19737:45 19787:131 19919:417 +247:191 1052:51 1057:119 1683:141 1987:96 2894:79 2990:137 3361:46 3563:130 3578:27 4308:231 4376:118 4472:134 4614:73 4667:93 4675:131 5128:160 5417:83 5541:122 6180:111 6244:63 6374:80 6449:108 6642:150 6652:136 6866:34 6868:50 6927:105 6955:262 7494:205 7709:298 7893:117 8074:226 8262:107 8548:69 8903:83 8943:199 9162:24 9251:463 9565:89 9952:71 10003:27 10206:112 10443:124 10532:209 10799:96 10884:69 10936:93 10998:116 11106:88 11164:65 11490:102 11871:181 11975:295 12238:146 12610:231 12798:47 12916:59 12968:58 13006:83 13400:101 13459:14 13529:94 13829:59 13881:79 13929:46 13982:9 14161:78 15303:199 15318:74 15434:64 15627:82 15925:164 16094:143 16307:30 16581:120 16775:53 16777:79 16814:57 16995:103 17659:131 17782:96 18031:111 18218:51 18364:58 18653:105 18740:25 18843:295 18986:144 19132:83 19255:102 19309:159 19787:140 19926:95 +448:170 773:132 1052:22 1244:39 1565:541 1743:110 2009:130 2194:99 2746:138 2894:70 2990:84 3320:77 3361:88 3578:6 3804:45 4191:120 4614:32 4764:61 4770:93 5686:153 5801:74 6705:79 6866:224 6968:99 6973:91 7019:85 7182:251 7395:117 7709:395 7727:247 7826:96 7908:139 8191:81 8333:188 8374:62 8731:110 8803:96 8840:47 8874:102 8884:59 8891:121 9294:131 9594:35 9851:157 10004:62 10394:85 10408:408 10443:79 10449:155 10532:232 10731:102 10971:107 11018:55 11106:151 11164:48 11303:111 11342:37 11447:132 11577:194 11968:196 12121:119 12238:36 12380:177 12520:36 12582:110 12840:91 12916:67 13929:61 14245:74 14531:115 15294:38 15458:98 15512:153 15523:93 15697:109 15780:54 15954:102 16810:110 17899:96 17942:225 18009:83 18198:87 18251:131 18364:64 18519:135 18898:79 19386:21 19478:93 +80:66 151:54 186:127 286:21 303:19 396:78 549:104 657:17 869:27 981:33 1040:8 1052:15 1336:55 1456:31 1481:38 1549:30 1743:18 1951:14 1952:18 1968:30 2139:73 2630:17 2690:16 2820:46 2894:11 2896:28 2998:33 3051:44 3433:10 3578:11 3661:112 3741:57 3878:78 4053:30 4132:254 4145:39 4308:17 4495:24 4520:74 4576:15 4614:10 4701:109 4707:13 4733:88 5128:11 5143:62 5308:109 5361:27 5401:30 5829:41 5965:28 6274:14 6304:120 6315:18 6374:11 6449:15 6456:41 6695:19 6859:36 6866:157 6917:16 6927:15 6972:28 6983:81 6994:84 7287:106 7363:12 7371:22 7494:127 7709:164 7741:62 8152:12 8349:101 8365:56 8374:20 8518:96 
8548:20 8565:337 8615:22 8784:52 8821:81 8916:75 8945:16 9732:91 9741:29 9885:64 9987:22 10161:19 10180:29 10381:20 10487:30 10532:141 10624:155 10779:55 10848:38 10998:83 11014:35 11074:21 11085:133 11204:42 11668:15 11675:29 11737:57 11792:153 11811:40 12290:91 12363:21 12448:9 12475:28 12547:19 12587:415 12607:27 12610:33 12704:70 12726:115 12916:10 12968:291 13006:36 13143:151 13151:21 13428:77 13435:42 13685:14 13688:43 13881:38 14047:92 14067:12 14131:58 14158:14 14161:22 14167:15 14325:9 14844:16 14948:29 15427:34 15433:17 15448:37 15492:24 15780:9 15798:29 15853:18 15925:23 15983:30 16084:22 16112:20 16184:65 16232:13 16371:24 16713:21 16766:48 16810:66 16826:69 16905:102 16995:15 17003:17 17083:15 17091:62 17173:11 17272:16 17279:27 17403:131 17422:121 17526:21 17638:26 17824:16 18037:19 18263:21 18364:25 18673:95 18684:24 18768:17 18986:103 19008:673 19158:63 19266:24 19327:54 19386:7 19455:17 19517:58 19873:18 19997:28 +151:222 1091:139 1316:21 1889:58 1978:53 2044:143 2089:125 2560:192 2934:161 2990:28 4011:16 4132:316 4134:65 4139:178 4489:27 4498:22 4764:151 5143:386 5519:45 6866:324 7494:132 7709:341 8043:128 10241:115 10311:65 10532:238 10747:51 10799:183 10848:7 10934:114 11447:163 11636:152 12238:216 12587:323 12674:77 12781:41 12798:50 14060:17 14570:41 15411:73 16034:31 16114:124 16392:50 16850:35 17526:265 17638:318 18010:120 18122:91 18925:108 18986:257 19008:599 19108:156 19593:72 19738:7 19894:137 +257:57 626:217 1040:108 1238:119 1316:89 1781:44 2857:108 2902:232 2990:80 3640:145 4281:154 4545:86 4584:124 4994:58 5431:96 5838:477 5931:128 6158:59 6221:142 6235:69 6866:397 7494:216 7709:155 8433:108 8759:70 8841:108 8925:58 9109:11 9125:62 9530:26 9671:74 9703:81 10027:34 10161:249 10244:341 10276:136 10532:262 10628:55 10940:87 11871:114 11899:569 11989:127 11994:248 12238:65 12313:130 12406:189 12704:25 12916:90 13012:129 13236:38 13929:91 14239:150 14766:137 15306:112 15777:126 16974:165 17487:136 17528:101 18334:107 18376:123 18737:58 18808:40 18943:94 19538:101 19572:103 19685:230 +370:137 563:99 1070:46 1316:79 1565:137 1682:64 1777:22 1889:12 1920:95 2746:70 3003:116 3181:48 3384:83 3578:6 3645:84 3734:445 3915:62 4308:104 5177:92 5304:187 5361:42 5627:60 5827:129 5945:47 6234:60 6439:76 6530:55 6866:192 6927:47 7363:36 7481:56 7494:92 7709:181 8718:159 9170:133 9338:78 9618:136 10131:215 10457:16 10516:22 10532:27 10735:34 10841:198 10848:189 11699:70 11704:106 11822:35 11871:54 12191:83 12772:157 12863:277 12916:11 12985:69 13384:87 13606:90 13829:161 13999:148 14087:170 14161:35 14167:46 14208:183 14556:37 15640:95 15726:63 16187:152 16188:167 16705:65 16810:51 17102:59 17267:45 17279:85 17280:82 17585:95 17951:63 18356:154 18944:54 19203:96 19208:116 19831:83 19973:48 19976:603 19984:128 +520:109 626:298 1040:296 1057:228 1314:99 2185:84 2990:49 3809:162 3944:141 4357:114 4957:77 5379:86 6265:84 6696:47 6866:393 7372:120 7709:179 8475:82 9006:119 9056:183 10161:102 10241:116 10244:624 10457:63 10532:33 10799:110 10848:27 11330:175 11400:58 11871:99 12406:39 12674:108 12916:139 13224:82 13369:86 13829:39 13846:168 13881:92 13929:83 14239:137 15473:75 15579:95 16081:109 16810:209 16974:302 17003:214 17487:248 17526:271 17537:20 18364:53 18599:26 19685:211 19894:64 +732:38 810:59 965:69 985:136 1052:26 1522:153 2015:45 2130:96 2339:93 2528:65 2720:14 2838:74 2934:90 3578:34 3702:93 3709:628 3882:51 3947:83 4382:55 4468:93 4554:50 4597:72 5000:67 5081:49 5131:59 5140:307 5305:48 5832:71 6229:76 6270:89 6327:122 6372:89 6514:127 6866:342 6917:165 6973:25 
7709:108 8078:30 8164:88 8215:48 8306:73 8333:54 8433:81 8501:138 8568:131 8669:37 8913:55 9043:58 9683:119 9814:74 9987:38 10078:121 10441:206 10443:85 10446:62 10457:17 10532:274 10623:287 10669:89 10848:65 11018:62 11116:106 11164:113 11328:101 11400:108 11754:37 11822:79 11892:81 12170:76 12319:42 12916:10 12968:243 13006:42 13143:25 13165:49 13529:104 13640:146 13687:93 13929:23 13967:79 14650:111 14844:56 15020:86 15164:103 15178:82 15206:11 15492:42 15544:104 15586:59 15853:129 15971:114 16034:81 16049:145 16211:79 16232:48 17384:129 17896:71 17961:18 18364:29 18493:52 18535:105 18715:57 18734:69 18849:159 18964:113 19101:64 19386:74 19788:75 19801:78 19873:63 19910:144 +448:55 985:47 993:35 1214:434 1244:191 1316:46 1824:156 2091:133 2591:102 2592:192 2814:87 2851:55 3242:78 3433:31 3578:29 3647:67 3896:54 3976:131 4609:123 4713:100 4751:61 4757:102 5417:99 5874:296 5959:35 6021:40 6084:55 6153:38 6234:125 6253:55 6374:69 6866:577 6912:66 6957:101 7494:91 7709:511 7739:35 7896:76 8024:82 8082:87 8387:76 8789:35 8804:90 8903:65 9043:49 9564:66 9674:77 9814:63 9819:120 9950:175 10187:77 10532:410 10663:537 10735:33 10799:104 11085:38 11168:48 11328:43 11685:145 11704:120 11711:74 11718:7 11781:89 11871:103 11989:33 12170:32 12420:89 12528:77 12590:90 12758:25 12916:7 13210:67 13529:88 13943:126 13982:163 14015:112 14167:44 14262:167 14297:119 14446:82 15223:106 15303:43 15369:67 15651:29 15777:2 15793:43 16077:89 16211:67 16464:47 16478:75 16514:73 16609:69 17046:84 17069:125 17173:64 17288:78 17297:65 17949:54 17970:39 18050:37 18072:92 18251:117 18364:12 18768:100 18792:76 18916:91 19143:111 19187:43 19210:69 19247:61 19629:76 19774:41 19869:105 19973:229 +268:69 715:88 834:74 1052:49 1388:160 2185:46 2902:446 3012:130 3578:26 3746:231 4200:327 4224:121 4411:28 4489:63 4637:269 5523:130 5551:97 5946:119 6060:17 6234:2 6280:84 6347:177 6514:60 6866:370 7470:71 7494:186 7709:56 8488:108 8859:181 8871:73 9059:170 9415:102 9734:124 10161:100 10457:33 10483:121 10532:53 10806:14 10930:114 11014:117 11106:67 11206:115 11642:55 11871:141 12065:148 12499:98 12798:116 12916:20 13143:55 13273:32 14128:116 14161:37 14310:35 14848:66 14973:90 15427:115 15772:107 15812:127 16034:88 16118:42 16721:608 16795:105 16869:8 17104:80 17365:76 17841:148 18323:108 18394:81 18474:54 18700:83 19110:97 19232:63 19453:33 19525:98 +80:47 247:195 520:46 626:22 1039:75 1279:60 1454:69 1521:141 1810:112 1952:52 1977:92 2240:57 2345:55 2376:200 3302:25 3337:104 3578:16 3801:113 4052:105 4136:93 4259:56 4427:86 4576:42 4594:73 4707:37 5128:33 5454:121 5487:112 5890:52 6229:48 6234:173 6449:44 6658:112 6746:66 6865:120 6866:323 7363:66 7483:467 7494:264 7709:328 7729:41 7739:33 7926:68 8043:30 8164:142 8548:57 8666:275 8772:64 8814:117 9143:10 9212:83 9698:55 9971:107 10200:50 10276:56 10292:119 10457:14 10532:104 10544:5 10555:86 10848:101 10867:549 10939:98 11006:41 11303:80 11400:145 11695:108 11871:128 11981:90 12344:91 12547:55 12610:94 12623:68 12732:82 12875:87 12916:13 13218:79 13645:58 13685:121 13777:104 13857:155 13864:56 13879:10 13897:108 13929:19 14161:48 14543:32 14556:33 14697:76 15294:36 15374:53 15547:96 15643:146 15780:25 15798:124 16034:40 16745:103 17279:77 17798:86 18224:63 18364:12 18768:47 18866:85 18986:59 19095:44 19174:26 19327:31 19386:59 19654:73 +626:147 633:60 1040:218 1633:171 1761:46 1889:95 1914:109 2560:88 2751:46 2934:242 3040:169 3515:440 3561:108 3767:86 4021:47 4134:90 4703:136 4957:103 5842:37 5946:210 6027:102 6060:64 6199:61 6234:269 6686:77 6866:383 7477:100 7709:258 
7896:64 8804:44 9143:106 9530:59 10161:104 10244:537 10457:47 10718:127 10848:35 11106:139 11330:258 11871:153 12133:63 12238:132 12322:109 12795:64 12916:144 12968:30 13384:57 13929:62 14162:80 14239:202 14665:85 14810:95 14947:61 15264:33 15844:81 16223:39 16974:223 17323:91 17487:183 17689:288 17903:94 18323:92 18364:78 18370:81 18372:76 19139:196 19309:133 19390:36 19676:100 19685:233 19926:66 +80:93 211:52 227:80 520:69 989:56 1040:7 1052:27 1057:109 1746:57 1987:26 1998:102 2523:123 2599:77 2630:32 2934:139 3356:99 3515:22 3578:18 3860:56 3906:109 4126:47 4131:68 4152:105 4273:40 4318:140 4472:36 4576:27 4909:201 4950:146 5081:26 5128:86 5162:124 5305:26 5308:51 5323:41 5384:119 5510:50 5627:36 5860:127 5864:100 5903:37 5956:89 6234:165 6253:34 6834:14 6866:238 6973:12 7210:75 7476:80 7494:77 7560:66 7815:60 7850:26 7873:63 7899:90 7908:79 7931:92 7955:69 8099:25 8263:113 8337:44 8615:84 8945:30 9126:85 9424:28 9565:87 10009:40 10161:100 10200:33 10241:185 10350:42 10532:67 10689:67 10735:20 10814:572 10875:48 11018:33 11136:49 11192:46 11303:117 11355:66 11668:28 11704:85 11877:116 12007:100 12136:32 12238:210 12319:22 12610:47 12712:371 12805:126 12916:11 13456:40 13639:58 13818:49 13891:36 13929:12 14067:22 14108:148 14161:10 14224:54 14456:51 14880:35 14900:53 14972:105 15239:63 15427:32 15512:47 15848:41 16144:129 16232:51 16312:495 16353:38 16464:59 16478:63 16584:22 16748:114 16810:84 16826:147 16851:139 16881:29 16934:18 17083:28 17343:176 17357:49 17708:52 17824:31 18284:17 18364:39 18460:94 18964:70 19213:65 19249:97 19277:61 19278:56 19324:48 19327:163 19386:13 19759:21 19787:129 +101:89 112:152 268:198 958:170 965:152 1040:15 1052:57 1316:138 1715:53 1933:121 1987:107 2026:91 2164:96 2295:94 2663:68 2760:74 3055:49 3086:44 3091:150 3308:45 3334:24 3471:136 3578:22 3746:268 3770:59 3800:92 3990:90 4494:140 4596:176 5041:53 5417:59 5596:69 5603:31 6067:134 6543:111 6866:770 6927:59 7680:47 7709:150 8165:47 8804:82 8895:533 8943:166 9325:37 9446:41 10043:94 10276:38 10443:114 10465:204 10532:64 10744:59 10757:4 10799:83 10913:55 11055:95 11303:105 11668:57 11685:147 11695:76 11856:69 11936:94 12238:65 12336:8 12355:124 12529:110 12615:94 12721:101 12916:12 13529:56 13535:53 13556:131 13632:142 13829:79 14310:86 14665:140 15034:68 15070:149 15586:64 15798:56 16161:398 16773:218 16885:113 16913:122 17017:69 17020:80 17528:118 17571:73 18118:105 18323:50 18364:16 18594:145 18737:79 18972:98 19215:131 19473:210 19699:85 19902:48 19926:66 19935:72 +101:78 247:46 1028:118 1312:83 1691:99 1704:95 2015:43 2114:94 2185:65 2456:81 2532:434 2538:85 2635:71 2751:72 2781:77 2789:59 2934:79 2993:79 3307:156 3466:133 3558:48 3578:6 3691:392 4457:84 4912:232 4925:68 4962:85 5043:262 5383:80 5466:190 5487:63 5869:67 5946:107 6272:355 6542:134 6865:47 6866:244 7051:62 7449:84 7494:140 7709:239 7893:57 7911:66 8152:40 8349:68 8632:80 9525:66 9747:116 10130:102 10154:124 10241:74 10357:125 10443:99 10457:17 10532:277 11106:69 11501:99 11757:75 11869:56 12136:58 12291:103 12448:30 12610:28 12616:131 12674:44 12798:102 12829:174 12916:7 13685:48 13829:93 13897:70 13929:44 14015:50 14161:19 14167:49 14330:108 14992:74 15119:133 15204:96 16103:106 16130:487 16488:18 16773:63 16810:93 16874:176 17499:44 17694:143 17742:87 18160:101 18190:132 18281:96 18737:14 19109:129 19455:57 19692:143 +520:98 552:68 617:35 626:67 773:69 1040:116 1889:14 1914:50 1957:88 2522:81 2536:159 3012:87 3210:76 3614:118 3932:55 4757:64 5231:79 5577:88 6600:116 6866:143 7462:80 7494:102 7709:173 7710:587 7896:74 8515:75 
8789:64 8866:152 8925:88 9011:67 9059:82 9120:132 10142:74 10161:147 10206:69 10244:350 10457:42 10532:109 10799:118 10848:231 11107:20 11330:59 11559:90 11640:139 11685:145 11822:97 11871:109 12170:463 12238:104 12704:178 12916:55 12939:119 12968:159 13143:91 13165:60 13881:66 13886:96 14239:46 14662:324 14665:39 15438:70 15674:111 15948:142 16094:88 16114:62 16515:95 16682:96 16810:101 16905:138 16939:123 16974:102 17090:91 17487:167 17523:76 17652:90 18115:65 18364:71 19144:33 19293:96 19386:30 19685:106 19926:23 +748:11 846:81 1052:52 1683:169 1889:62 2336:65 2396:88 2497:83 2507:126 2560:47 2899:144 2934:130 3352:131 3529:57 3578:27 4174:381 4707:92 5287:94 5625:152 5680:168 6027:81 6060:8 6814:141 6864:45 6865:100 6866:319 6991:18 7462:123 7494:87 7709:22 7896:104 8240:56 8420:75 8437:150 9006:50 9274:207 10035:36 10744:324 11685:80 11822:80 12238:136 12310:80 12448:64 12798:57 12916:11 13143:144 13292:217 13468:625 13964:100 14195:126 14404:177 14461:67 14479:136 14562:57 14731:63 14915:147 15492:85 15798:104 15966:192 16224:52 16449:82 16535:163 16810:42 17340:76 17350:14 17827:88 18364:30 18372:155 18624:98 19386:50 19890:155 19921:73 +1052:61 1268:104 1560:181 1682:171 1968:84 2466:121 2571:183 2880:114 2934:33 3204:98 3433:87 3578:16 4376:97 4494:106 4573:83 4807:53 5057:190 5361:112 5370:54 5379:86 5417:123 5724:63 5937:31 6060:103 6097:137 6234:70 6372:12 6499:32 6865:117 6866:267 6868:16 7414:32 7494:157 7694:354 7709:323 8198:301 8804:165 9164:289 9429:61 9481:225 9592:132 10443:132 10508:111 10532:262 10735:91 10773:148 10799:120 11187:151 11245:60 11485:72 11518:119 11642:174 11871:177 12039:106 12916:21 12932:95 12968:40 13645:170 13881:180 13907:60 13929:55 14844:133 14880:152 15204:39 15294:104 15323:537 15780:74 15798:121 16296:60 16452:93 16810:71 17519:102 17523:148 17537:104 18364:69 18396:44 19443:256 19688:72 +360:128 626:221 732:151 1040:110 1052:103 1057:62 1268:53 1633:258 1914:164 1968:142 2895:86 4512:114 4651:88 4707:182 4708:112 5045:360 5260:60 5377:103 5487:70 5636:98 6089:170 6234:145 6301:185 6372:125 6866:88 7494:25 7690:31 7709:69 8160:79 8441:53 8548:140 8720:43 8799:133 8816:6 9379:267 9594:71 9690:132 9734:103 10161:24 10187:57 10241:59 10244:462 10256:122 10443:57 10532:90 10848:71 11642:174 11871:57 11994:252 12238:43 12599:107 12610:117 12794:63 12795:143 12796:55 12916:140 12932:150 12968:71 13143:142 13384:15 13428:26 13498:108 14239:152 14665:128 14729:69 15376:78 15948:234 15974:65 16373:143 16451:87 16797:74 16932:83 17365:97 17487:276 18367:111 18397:120 18964:87 19094:93 19232:25 19349:133 19560:99 19685:234 +96:86 488:97 749:76 863:67 1040:13 1738:101 1743:61 1801:90 1824:134 1990:97 2728:184 2820:158 2858:42 2990:55 3150:192 3433:250 3515:80 3578:7 3598:35 3882:49 3976:92 3988:58 4180:94 4279:93 4321:100 4603:131 4614:36 4723:45 5123:100 5760:143 5874:56 5959:40 6022:253 6084:62 6234:85 6269:90 6374:39 6406:36 6866:511 6925:193 7363:39 7470:127 7494:60 7584:74 7709:371 7729:49 7739:40 7844:41 7896:70 8301:72 8475:85 8504:77 8642:59 8707:64 8804:78 9221:57 9446:36 9497:11 9523:136 9614:152 9757:60 9900:146 9987:37 9989:54 10009:73 10035:166 10154:109 10195:34 10218:357 10276:67 10318:103 10377:44 10379:57 10457:34 10530:60 10532:211 10790:96 11303:178 11342:163 11400:17 11652:143 11853:172 11977:305 11990:103 12170:37 12238:167 12314:94 12378:146 12547:66 12916:14 13006:41 13139:49 13143:86 13287:92 13738:65 13829:62 13881:67 13886:143 13963:119 14404:128 14556:40 14697:45 14773:103 14947:106 15369:76 15524:73 15844:63 16211:154 16338:64 
16913:108 16992:86 17006:91 17319:83 17343:127 17369:68 17679:87 17708:95 17904:67 17947:88 18050:42 18115:28 18364:43 18499:73 19117:122 19189:71 19197:79 19210:39 19739:52 19783:91 19845:32 19889:57 19921:35 19935:127 +245:360 247:73 919:141 1052:20 1054:78 1268:40 1312:114 1363:37 2990:130 3578:26 3707:190 3882:115 3914:41 4319:81 4492:76 4545:124 4655:80 4957:137 5176:85 5379:136 5494:42 6051:113 6337:94 6530:95 6865:38 6866:354 6917:42 7018:57 7051:49 7254:106 7709:164 7781:77 7908:94 7948:217 8548:53 8913:83 9192:117 9325:181 9384:86 9417:33 10035:105 10206:43 10328:92 10441:104 10532:201 10735:58 10773:47 11122:17 11441:53 11606:115 11822:30 11984:102 12238:212 12270:72 12607:37 12610:89 12916:11 12968:49 13388:88 13755:100 13896:121 13929:18 13967:119 14161:15 14774:143 14856:36 14947:70 15523:118 15780:47 15853:98 16109:62 16115:147 16518:190 16584:62 16585:794 16769:102 16810:43 17205:126 17343:42 17627:106 18160:40 18364:11 18684:64 18916:23 19157:69 19386:74 19634:57 19767:109 19801:20 19873:47 +293:4 345:106 563:168 810:73 813:61 1273:155 1721:99 2025:34 2185:147 2243:139 2630:114 2902:56 3482:130 3515:39 3578:6 3643:41 3770:176 3915:67 4308:56 4472:65 4838:117 4877:120 4978:81 5076:105 5128:39 5446:65 5965:94 6437:103 6602:54 6615:5 6629:85 6807:300 6826:186 6866:142 7194:183 7199:25 7287:9 7334:159 7494:316 7709:108 7831:112 7872:137 8078:58 8433:53 8703:48 8722:78 8723:97 8903:98 8909:391 8972:88 9140:87 9159:206 9320:107 9858:109 10161:241 10532:153 10848:116 10925:54 11085:160 11125:66 11206:96 11303:125 11400:134 11454:143 11478:261 12238:39 12318:32 12607:46 12798:44 12916:3 13515:51 13540:88 13881:234 14133:599 14161:19 14224:195 14331:46 14404:84 15044:93 15178:157 15265:152 15295:143 15433:57 16404:40 16478:137 17104:40 17348:43 17459:94 18100:67 18334:30 18372:48 18454:35 18684:80 18768:56 18852:85 19040:92 19109:79 19255:204 19473:137 19910:69 19916:115 19947:92 +1833:26 2546:45 2630:231 2728:365 2785:70 2828:471 2990:17 3307:65 3360:91 3614:175 4230:140 5459:145 5783:106 5997:102 6024:452 6234:156 6372:86 6866:413 7004:43 7156:104 7494:11 7682:79 7709:133 7915:106 10035:127 10292:64 10386:121 10443:52 10532:234 10705:388 10848:24 10899:155 10936:80 11171:104 11871:1 12355:49 12798:120 12916:7 13378:78 13384:133 13471:124 13881:87 14187:97 14310:71 14312:116 14556:159 14780:60 15323:437 15610:49 15841:25 16109:59 16136:81 16140:71 16223:121 16302:81 16399:85 16478:35 17069:111 17841:81 18744:62 18916:106 18920:99 19278:135 19769:124 +72:112 203:61 371:4 657:31 1040:28 1244:91 1268:96 1427:35 1472:89 1522:130 1553:30 1743:64 1889:61 1951:51 1952:33 1990:25 2015:68 2252:124 2339:47 2851:32 2894:41 2897:93 3086:20 3296:79 3449:38 3497:67 3515:21 3569:49 3578:34 3932:46 4085:60 4412:67 4656:57 4664:49 4707:23 4842:115 4995:37 5041:24 5057:20 5081:50 5131:30 5166:38 5507:56 5551:26 5596:64 5874:29 5959:21 6027:31 6549:30 6577:61 6866:299 6896:47 6917:28 7052:45 7055:37 7248:35 7360:43 7664:98 7709:206 7893:30 7896:132 8034:44 8078:62 8146:58 8152:64 8241:478 8262:28 8298:31 8737:52 8804:167 8943:26 9069:94 9089:143 9099:108 9180:22 9234:63 9247:180 9433:82 9757:31 9814:37 9987:39 10218:31 10276:17 10457:9 10511:24 10532:203 10735:19 10750:67 10848:186 10970:76 11322:106 11328:51 11342:21 11409:29 11449:100 11487:71 11624:35 11642:200 11864:101 11877:28 12173:138 12209:47 12238:108 12317:68 12319:21 12399:225 12448:32 12509:63 12548:73 12710:120 12720:39 12836:76 12916:13 13114:39 13165:50 13246:49 13332:38 13384:19 13519:40 13559:54 13688:39 13891:35 14048:59 14433:96 
14523:68 14665:16 14715:58 14744:52 15102:551 15171:64 15279:113 15330:35 15386:37 15411:74 15492:42 15586:60 15617:57 15686:55 15780:16 16248:106 16406:35 16417:93 16464:28 16651:41 16795:28 16900:71 17045:30 17104:64 17173:19 17267:26 17277:34 17279:24 17369:36 17518:76 17711:29 17756:44 17949:32 17952:37 17970:46 18100:71 18364:89 18413:36 18573:62 18622:34 18775:99 18944:75 19000:272 19117:32 19219:120 19266:42 19453:104 19473:97 19739:27 19774:48 19783:47 19809:26 19812:39 19873:32 19889:59 19921:37 19926:105 19973:27 +1205:124 1251:88 1715:207 2185:99 2751:39 2990:137 3084:51 3115:121 3569:414 3712:106 4085:119 5379:143 5384:53 5724:117 6010:136 6234:103 6372:92 6866:118 7272:74 7494:162 7709:148 8260:81 8703:216 9059:96 9454:470 9966:112 10164:6 10443:127 10753:111 10848:98 11392:168 11871:88 12206:103 12238:96 12412:615 12529:83 13645:307 13829:182 13907:116 13919:88 13929:99 14067:180 14227:74 15204:97 15295:102 15523:111 18680:70 18737:72 19786:4 19926:67 +101:19 178:54 203:29 247:46 268:52 404:42 561:36 584:36 658:34 749:37 799:84 891:141 1042:40 1070:25 1166:60 1188:37 1316:142 1427:33 1699:28 1715:23 1773:71 1943:35 1951:47 1987:23 1990:24 2015:42 2062:44 2144:51 2157:101 2173:43 2290:53 2359:51 2530:34 2653:150 2746:37 2897:29 3086:57 3279:91 3280:37 3296:74 3433:17 3443:84 3449:35 3578:10 3645:45 3711:40 3882:24 3915:67 3975:59 4103:36 4200:27 4285:65 4328:27 4412:31 4495:41 4599:54 4614:35 4698:42 4707:43 4722:101 4902:69 5056:56 5057:19 5131:28 5178:58 5298:70 5361:45 5448:40 5507:26 5563:48 5735:67 5743:38 5799:60 5803:33 5821:41 5867:109 6014:60 6153:42 6172:36 6234:263 6253:31 6374:19 6837:27 6866:275 6996:65 7007:61 7055:70 7064:60 7250:33 7304:55 7333:47 7422:43 7494:108 7596:57 7658:32 7709:107 7746:30 7797:40 7931:41 7947:31 8034:42 8078:29 8150:66 8216:52 8218:30 8293:47 8676:67 8737:73 8789:121 8804:23 9032:58 9056:24 9087:116 9102:71 9109:34 9119:36 9136:56 9257:116 9261:117 9475:56 9507:51 9619:54 9657:164 9695:96 9757:29 9792:112 9860:28 9941:36 9989:26 10003:105 10319:30 10377:22 10443:94 10457:17 10470:34 10532:371 10600:45 10665:39 10735:18 10746:91 10875:43 11005:46 11026:432 11120:58 11125:33 11129:47 11147:154 11192:41 11214:54 11224:47 11244:54 11258:48 11277:42 11328:24 11330:23 11332:106 11359:65 11380:145 11505:64 11518:239 11539:65 11552:58 11624:130 11686:42 11695:71 11856:30 11877:78 11942:86 11974:43 12191:55 12236:52 12268:38 12419:60 12448:15 12517:39 12781:70 12876:82 12916:2 13044:66 13151:70 13163:44 13433:35 13470:42 13525:69 13885:31 13891:32 13964:24 14040:40 14067:100 14161:19 14185:83 14198:52 14313:46 14358:184 14375:23 14404:21 14406:72 14501:26 14556:39 14653:37 14696:68 14749:28 14851:19 14987:33 15080:37 15090:58 15303:24 15433:28 15492:20 15512:42 15552:34 15595:50 15697:60 15726:34 15811:3 16034:97 16048:54 16094:35 16145:64 16302:82 16369:39 16432:50 16493:71 16510:44 16514:41 16581:58 16698:67 16793:151 16810:99 16905:46 16998:45 17045:28 17104:60 17189:251 17233:42 17267:24 17272:27 17336:131 17340:73 17437:47 17471:269 17493:37 17593:67 17608:56 17784:29 17947:43 17970:22 18115:36 18209:65 18295:111 18314:29 18353:70 18406:67 18435:85 18557:42 18821:34 18832:43 18866:50 18962:65 19174:15 19465:28 19476:48 19739:51 19812:74 19854:12 19921:17 +95:68 303:43 349:54 651:144 732:25 989:158 1181:54 1258:100 1313:41 1643:117 1914:27 1990:66 2025:39 2062:150 2158:206 2269:58 2275:53 2312:185 2339:31 2528:95 2530:47 2832:50 2851:168 2902:38 2920:74 2990:62 3086:26 3266:115 3296:68 3449:49 3515:27 3578:53 3860:35 3882:33 3911:47 3932:30 
3976:35 4153:327 4394:64 4412:43 4545:43 4597:94 4617:48 4687:82 4802:108 5233:363 5240:146 5241:412 5379:51 5551:33 5572:68 5874:76 5898:78 5937:83 5959:27 6153:29 6372:102 6554:174 6790:63 6866:491 6917:36 7336:47 7363:27 7560:41 7658:44 7709:128 7850:33 7902:63 7910:81 7947:42 8006:65 8016:59 8124:86 8404:54 8548:23 8789:141 8943:132 9043:38 9109:146 9275:70 9424:35 9580:78 9596:53 9614:102 9814:48 9894:45 9898:131 9923:37 9962:125 9987:25 10154:70 10161:127 10218:160 10338:85 10457:23 10532:272 10535:117 10735:50 10744:35 10795:53 10799:206 10923:50 10925:37 11007:156 11014:40 11168:111 11224:33 11380:100 11400:98 11781:103 11871:90 11964:92 12082:79 12363:49 12413:62 12442:99 12508:78 12674:100 12704:92 12916:13 12968:110 13151:49 13165:32 13433:48 13479:64 13853:57 13894:39 14067:28 14161:26 14178:57 14385:115 14501:72 14665:21 14787:71 15020:57 15056:61 15303:33 15355:64 15574:83 15780:20 15810:112 16053:89 16112:46 16232:31 16382:41 16406:90 16464:36 16523:59 16591:55 16637:55 16795:36 16912:76 16913:36 16997:31 17083:34 17104:55 17148:52 17173:25 17385:128 17500:55 17571:43 17784:40 18043:63 18506:189 18711:50 18854:70 19109:144 19174:21 19232:91 19240:59 19278:23 19386:16 19559:94 19809:33 19973:35 +51:46 431:36 493:54 494:5 1040:39 1052:11 1062:125 1166:26 1316:73 1522:63 1886:54 1981:128 2275:67 2630:49 2717:32 3086:32 3298:53 3578:6 3870:24 4308:24 4387:35 4424:556 4554:41 4576:21 4795:37 4826:26 4906:57 5057:16 5081:20 5128:33 5417:156 5655:178 6011:26 6253:26 6372:55 6384:59 6444:53 6790:20 6866:183 7051:27 7210:29 7494:133 7570:80 7709:52 7723:36 8214:57 8215:20 8416:29 8548:14 8666:28 8703:42 8786:173 8913:22 9800:76 9960:33 10161:69 10441:28 10532:141 10626:121 10636:90 10682:55 10735:16 10773:26 10789:38 10796:113 10805:49 10834:42 10999:87 11089:100 11106:135 11181:125 11222:41 11224:20 11422:32 11877:67 12029:130 12136:75 12156:728 12238:103 12322:77 12560:44 12607:20 12610:60 12916:9 12935:17 12968:53 13006:35 13038:57 13645:29 13881:38 13929:9 13964:20 14161:16 14650:45 14665:13 14973:19 15063:102 15066:23 15335:33 15473:99 15853:26 16248:29 16401:63 16432:22 16581:50 16810:88 16852:66 16866:136 16936:27 17039:27 17102:47 17145:90 17578:46 17772:35 17782:62 17850:34 18397:67 18694:30 19174:13 19186:25 19327:63 19386:30 19758:114 19774:20 19823:172 19873:26 +40:443 1040:83 1052:155 2183:60 2603:133 2720:89 2922:60 2990:134 3801:75 3874:132 4179:145 4427:55 5831:41 6153:267 6234:36 6866:480 7494:149 7549:45 7709:116 7835:148 8804:74 10035:126 10448:34 10532:138 10848:48 10988:139 11070:47 11106:28 11400:128 11859:64 11871:259 11978:81 12582:46 12599:114 12674:205 12916:11 13400:76 13520:78 13594:24 13881:248 14015:101 14889:73 15627:107 15748:646 15770:81 15777:93 16399:148 16566:183 16584:248 16810:62 17166:173 17479:48 17885:17 18364:88 18964:135 19181:56 19255:119 19288:138 19890:462 19932:128 +80:55 101:19 247:46 257:57 281:46 303:31 520:88 549:58 869:44 981:27 1040:26 1348:36 1522:72 1799:47 1943:35 2015:84 2528:84 2591:116 2612:49 2630:28 2635:105 2707:44 2720:130 2789:87 2808:154 3038:105 3150:31 3187:23 3308:19 3361:84 3515:19 3578:35 3663:66 3713:30 3784:121 3965:574 3988:28 4035:122 4061:41 4174:44 4273:36 4308:28 4469:41 4526:25 4554:23 4614:17 4664:46 4711:34 4720:45 4723:22 4792:100 4893:61 4978:40 5041:23 5128:19 5269:80 5305:46 5347:37 5361:22 5367:36 5529:121 5551:24 5627:64 5878:29 6011:30 6132:39 6238:43 6253:30 6285:144 6374:19 6439:40 6449:51 6574:45 6705:143 6762:62 6866:130 6927:25 7257:84 7363:19 7494:55 7560:29 7709:375 7850:23 7874:146 7938:32 
8074:54 8078:14 8125:93 8262:26 8268:103 8416:34 8548:33 8601:38 8609:80 8653:79 8703:48 8817:54 8866:118 8913:26 9126:38 9252:177 9328:59 9386:52 9422:127 9446:18 9683:42 10241:184 10264:37 10377:64 10441:32 10443:139 10470:33 10530:29 10535:28 10541:36 10631:37 10735:36 10744:25 10848:78 10925:53 11018:29 11095:49 11119:87 11170:66 11224:70 11316:47 11342:20 11511:106 11672:11 11704:11 11729:4 11958:34 11981:52 12206:72 12238:35 12319:39 12379:30 12387:49 12448:45 12607:23 12610:96 12721:116 12728:43 12916:25 12935:39 12949:93 12976:126 13166:48 13435:35 13519:37 13588:41 13685:23 13739:55 13878:43 13881:65 13929:54 13964:47 14042:63 14067:20 14161:19 14195:29 14375:230 14574:37 14851:19 15029:86 15044:92 15226:32 15492:20 15552:34 15586:28 15740:141 15853:30 15888:106 15925:156 16022:99 16029:78 16052:66 16060:46 16094:274 16325:26 16353:135 16464:26 16584:19 16881:26 16913:26 16936:31 17017:30 17236:45 17272:27 17318:30 17499:144 17772:40 17904:32 18089:51 18115:88 18146:102 18160:25 18364:69 18476:40 19053:45 19144:145 19182:99 19210:38 19266:39 19327:162 19386:35 19429:34 19455:56 19473:45 19541:48 19742:62 19834:30 19910:34 19926:223 19935:31 19947:91 +615:64 764:39 981:57 1040:13 1052:25 1422:109 1657:122 1968:35 1987:48 2213:63 2407:73 2528:35 2634:78 2648:38 2820:53 2851:63 2868:112 3266:70 3280:76 3578:7 3661:98 3713:63 3932:89 4085:62 4512:9 4549:5 4623:166 5057:79 5724:39 5760:34 6056:120 6315:62 6790:47 6865:49 6866:631 6927:52 6953:20 7097:79 7118:50 7494:148 7592:217 7850:49 8325:59 8646:101 8937:139 8943:99 9187:140 9989:54 10208:27 10467:395 10532:259 10673:30 10779:63 10799:74 11272:306 11400:192 11470:210 11721:142 11871:95 11953:275 12136:60 12173:117 12238:55 12529:165 12607:47 12610:29 12916:12 12968:142 13262:84 13380:61 13604:114 13713:67 13881:95 13929:23 14161:19 14184:39 14372:103 14440:566 15122:65 15183:118 15204:89 15492:41 16143:84 16353:141 16688:148 16798:19 17017:62 17102:87 17182:29 17502:156 18160:52 18364:29 18594:65 18734:67 18855:101 19348:67 19386:24 19823:83 19889:115 19935:64 +102:191 247:136 732:106 989:94 1011:141 1040:13 1889:185 2086:96 2160:119 2339:44 2630:112 2820:51 2847:63 2894:38 2985:108 3084:57 3490:84 3500:181 3578:38 3713:60 3988:56 4308:55 4472:64 4576:144 5128:227 5131:111 5201:61 5660:88 5678:66 5869:132 6331:96 6374:38 6549:56 6666:66 6751:124 6837:215 6859:60 6865:92 6866:599 6917:103 6973:53 7144:157 7709:21 7777:110 7896:52 8374:66 8407:171 8548:98 8666:64 8804:83 8817:54 8956:92 9212:116 9885:71 10172:56 10359:52 10535:167 10735:72 10753:26 10848:117 10925:53 10953:74 10998:55 11153:68 11224:47 11303:76 11487:65 11503:89 11695:87 11781:49 12345:118 12509:48 12599:144 12607:45 12610:136 12798:21 12916:29 12968:71 12988:100 13006:39 13685:47 13829:76 13929:43 14122:113 14161:18 14237:117 14251:104 14375:273 14851:37 15119:107 15305:71 15492:39 15600:176 15757:618 15867:99 16007:97 16034:53 16432:49 16567:92 16995:49 17017:59 17386:8 17705:121 17951:50 17997:94 18013:83 18364:41 18684:78 18768:164 18875:91 18943:112 19024:87 19117:59 19174:30 19247:76 19386:69 19873:58 19908:104 19921:34 +626:109 1630:407 2243:77 2934:189 2990:39 3295:46 3307:83 3328:89 3370:84 3458:96 3529:69 3849:155 3932:180 4109:106 4744:120 4962:77 5169:117 6195:74 6616:143 6840:94 6866:262 7124:145 7494:282 7684:55 7709:245 8262:215 8804:34 8903:102 8915:57 9276:40 9594:95 9876:67 9908:94 10532:37 10637:36 10688:85 10821:82 11140:268 11871:79 12238:98 12916:14 13211:66 13780:478 13829:84 13907:132 14236:29 14258:91 14606:282 14621:44 14872:569 14880:38 
15119:31 15987:88 16029:183 16232:188 16315:82 16356:72 17069:104 17161:7 17797:153 17961:111 18115:194 18364:58 18907:69 19293:22 19309:98 19923:73 +953:94 2602:291 2621:90 2751:76 2990:128 3307:48 3457:81 3614:93 4962:51 5487:52 6234:133 6866:322 7494:148 7709:10 7710:589 9505:160 9870:302 10035:46 10244:316 10443:71 10532:75 10729:141 11564:14 11685:72 11994:230 12170:278 12595:279 12704:63 12916:58 13465:102 13881:222 14013:106 14179:89 14239:277 14325:54 14429:100 14458:173 14837:88 14889:108 14947:148 15523:100 15674:122 15779:77 16478:83 16974:153 17255:144 17270:46 17487:126 17782:47 17961:48 18035:154 18115:88 18493:192 19024:42 19255:40 19325:131 19685:107 +17:130 80:173 257:69 832:45 833:110 889:87 923:134 1052:38 1715:143 1951:74 2004:47 2751:101 2857:84 2990:52 3361:134 3578:30 4444:122 4494:297 4576:76 4675:49 4795:135 5166:110 5466:127 5594:79 5946:48 6027:118 6065:90 6467:381 6722:53 6866:433 7111:115 7183:66 7363:60 7494:194 7696:64 7709:195 7797:123 7934:140 8149:154 8266:48 8356:97 8759:157 8772:69 8804:89 9339:158 9511:84 9576:52 9859:166 10154:172 10297:27 10357:12 10407:136 10486:74 10516:13 10735:57 10777:129 11164:39 11440:515 11675:307 11705:12 11868:68 12363:220 12607:142 12610:86 12680:163 12766:112 12916:3 12935:61 13482:132 13799:57 13881:142 13929:34 13967:116 14310:28 14358:190 15022:197 15083:183 15548:160 15549:72 15678:66 15777:82 15780:46 15938:62 16034:4 16223:105 16766:124 16767:63 17374:76 17403:60 17425:153 18115:68 18364:22 18879:14 19095:81 19144:77 19327:113 19379:180 19386:36 19873:92 19946:68 +247:50 286:79 755:90 822:59 875:85 1052:27 1179:56 1330:150 1481:70 1684:149 1759:84 2367:102 2493:88 2511:110 2894:42 3361:82 3578:28 3932:142 3988:62 4273:158 4308:122 4309:64 4376:44 4576:108 4670:116 4894:134 5053:58 5178:63 5417:112 5460:88 5937:44 5946:69 6105:79 6374:42 6777:151 6837:60 6859:67 6866:153 7494:141 7916:713 8099:49 8408:48 8714:69 8804:118 9136:124 10154:207 10161:84 10187:89 10350:83 10443:36 10532:92 10753:131 10799:131 10998:61 11028:20 11245:172 11856:65 11871:78 12136:63 12469:124 12610:91 12674:107 12916:22 12968:126 13006:44 13829:154 13859:109 13881:78 14161:41 14179:100 14434:160 14683:491 16146:116 16232:50 16406:72 16766:88 16810:174 16995:55 17145:134 17329:75 17456:25 17457:63 17523:65 17828:36 18115:174 18364:15 18768:61 18885:111 18897:8 18916:69 18943:102 19109:102 19122:102 19255:79 19278:37 19386:51 19415:112 19553:52 19560:50 19935:68 +314:50 413:202 464:111 569:16 612:36 707:55 891:32 904:39 991:56 1070:28 1166:34 1208:124 1244:49 1798:47 1824:100 1915:78 1987:239 1990:27 2338:177 2345:37 2425:55 2446:69 2528:28 2630:33 2720:111 2789:33 2832:84 2934:211 2990:50 3150:36 3433:60 3578:18 3614:130 3882:28 3932:99 4362:50 4451:74 4460:40 4472:37 4494:47 4554:27 4582:90 4597:39 4614:20 4687:68 4722:461 4802:90 5128:22 5147:138 5163:113 5178:33 5220:117 5234:52 5339:229 5361:26 5448:46 5466:113 5867:125 5959:22 6027:114 6099:80 6132:46 6172:164 6238:50 6374:22 6712:53 6790:26 6810:74 6866:89 6974:54 7055:40 7076:53 7298:63 7388:129 7484:87 7709:377 7746:35 8036:7 8150:114 8293:54 8466:49 8495:62 8555:54 8586:49 8737:28 8771:57 8804:137 8988:29 9109:62 9248:63 9424:29 9446:20 9631:203 9757:67 9874:76 9960:94 10161:135 10241:79 10277:97 10377:49 10443:100 10532:50 10794:43 10934:32 10989:71 10993:78 11168:31 11262:157 11277:49 11322:29 11422:43 11518:27 11552:67 11619:93 11624:37 11833:119 11974:50 12054:63 12238:74 12314:52 12319:45 12406:101 12448:17 12570:58 12773:103 12804:36 12916:1 12968:188 12987:60 13063:11 13765:93 
13863:34 13881:137 13885:36 13943:85 14032:137 14222:29 14361:110 14404:71 14535:65 15179:60 15342:48 15347:202 15446:86 15533:118 15575:69 15618:56 15853:35 16034:179 16094:40 16342:133 16353:39 16582:76 16843:65 16897:78 16995:28 16997:131 17080:74 17471:31 17500:45 17512:52 17746:51 17771:41 17862:59 17898:51 17928:119 17953:51 18016:12 18050:70 18121:85 18323:53 18406:191 18451:60 18711:83 19085:45 19117:68 19153:115 19172:32 19210:22 19272:42 19353:82 19508:60 19602:53 19739:88 19914:49 19924:230 19973:29 +33:71 410:81 568:70 732:51 1039:63 1040:9 1244:60 1427:47 1456:73 1824:31 1981:5 1987:33 1990:68 2062:31 2185:52 2737:58 2934:52 2969:141 2990:132 3012:71 3022:18 3187:33 3308:28 3367:167 3384:63 3433:50 3578:27 3686:100 3746:164 3896:86 4383:48 4468:63 4603:45 4757:10 4831:73 4874:101 5043:46 5095:67 5166:50 5428:57 5815:47 6132:56 6234:50 6269:62 6285:19 6315:42 6866:227 6956:130 7055:49 7182:242 7371:53 7494:224 7640:46 7658:45 8301:50 8306:49 8475:93 8495:76 8593:55 8601:55 8957:83 8986:40 9148:56 9325:62 9394:32 9757:42 10001:58 10043:45 10109:82 10161:126 10235:151 10237:66 10319:42 10361:13 10457:24 10530:83 10532:191 10620:34 10735:26 10744:72 11168:38 11342:170 11432:116 11518:34 11582:75 11695:66 11884:53 11900:64 12238:61 12556:51 12582:74 12610:20 12815:58 12916:7 12956:65 13019:93 13561:58 14028:70 14131:46 14161:13 14199:102 14697:63 14749:40 15160:158 15294:30 15325:71 15355:99 15433:80 16015:86 16029:127 16160:65 16651:600 16773:313 17104:85 17173:25 17264:61 17314:85 17457:41 17528:81 17771:51 17824:78 17862:73 17904:92 17942:58 17970:122 18364:20 18441:215 18622:45 18737:77 18771:79 18813:44 18964:35 19041:88 19167:78 19174:86 19210:27 19247:125 19255:127 19288:115 19293:58 19386:33 19752:122 19882:64 19883:70 19935:44 19973:181 +173:65 416:199 1110:84 1659:53 1889:153 1968:160 2194:128 2647:112 2990:105 3071:99 3280:174 3372:118 3578:15 3590:43 3661:92 3882:228 4510:68 4614:83 5746:108 5946:73 6234:67 6384:325 6406:143 6866:310 6965:98 7494:143 7709:15 9657:95 10441:155 10773:141 10899:46 11383:93 11668:235 12156:617 12448:142 12935:93 13778:163 13929:52 13967:177 13999:119 15108:96 15355:100 15378:74 15622:11 15777:111 15853:145 16466:37 16705:330 16905:74 17374:128 18550:76 19109:107 19386:221 19801:70 19926:100 +184:59 867:116 1040:17 1057:47 1522:64 1831:63 1889:57 1968:88 2634:99 2726:183 2751:115 2934:252 3167:196 3226:46 3578:17 3849:30 4173:115 5081:62 5128:51 5230:140 5361:59 5579:85 6060:41 6134:139 6449:68 6684:94 6705:43 6714:637 6866:632 7088:170 7229:80 7494:236 7600:150 7709:202 7911:44 8000:121 8078:38 8152:53 8218:80 8370:147 8667:78 8804:68 9043:72 9316:112 9990:49 10035:64 10161:198 10241:167 10276:43 10443:82 10457:22 10532:135 10612:110 10735:144 10787:83 10848:127 11316:125 11401:91 11668:130 11671:99 11871:59 11877:68 12466:90 12605:100 12916:15 13006:105 13166:128 13625:350 13632:81 13829:91 13881:59 13929:29 13964:62 14161:49 14310:130 15119:106 15492:52 15747:73 15859:218 15884:180 15950:95 16464:69 16485:84 16705:183 17526:188 17600:120 18364:18 18921:42 19242:77 19255:64 19420:72 +985:150 1057:135 1443:61 1691:9 2528:104 2659:194 2751:99 2840:413 2934:180 2990:274 3300:126 3425:31 3804:11 3902:213 4191:291 4383:152 4598:166 4722:150 5383:178 5660:398 5670:82 5683:91 5937:48 6027:43 6234:276 6247:73 6866:339 7051:138 7494:149 7709:158 7722:115 8151:71 8342:117 8572:113 8760:23 8891:50 9614:111 9781:239 9966:96 9987:163 10035:135 10233:19 10241:63 10443:76 10532:210 10799:134 10848:156 11326:91 11685:99 11869:40 11871:295 12132:64 
12201:88 12238:121 12448:67 12916:52 12917:81 13384:85 13403:101 13407:134 14067:90 14543:105 15102:177 16157:144 16223:72 16478:173 16717:185 17356:204 17430:184 17737:183 17821:122 18344:93 18364:31 19109:19 19255:81 19278:75 19455:127 19756:173 +183:127 251:85 520:76 926:274 1987:157 3051:101 3150:71 3304:98 3307:123 3578:15 3713:69 3900:39 4182:46 4576:166 5128:44 5824:86 6065:66 6315:68 6866:426 7287:78 7379:121 7494:81 7560:67 7709:559 7845:82 7938:74 8615:171 8666:73 9109:62 9116:88 9565:88 9610:88 9960:118 10532:357 10779:277 10848:86 10998:63 11052:91 11224:54 11303:100 11871:114 12082:102 12238:116 12380:73 12916:2 12958:64 13469:16 13881:76 13929:50 14533:101 14593:621 14612:96 15119:57 15204:74 15448:119 15489:123 15723:49 15844:68 15997:119 16034:117 16382:119 16401:78 16782:138 16806:507 16831:126 16919:65 17368:161 17519:105 17818:133 17928:118 18779:100 18894:66 19244:126 19255:98 19278:38 19305:123 19386:26 19787:51 +151:218 258:102 428:33 626:190 1038:140 1040:47 1249:85 1395:49 1914:71 1981:23 2008:39 2606:96 2720:84 2872:101 3072:24 3244:84 3484:93 3614:139 4148:156 4384:24 4455:87 4723:159 4956:3 5298:111 5956:73 6168:103 6615:55 6866:126 7055:126 7089:128 7494:52 7659:59 7709:526 8227:123 8638:120 9442:189 9567:211 9984:27 10035:29 10244:398 10497:37 10532:90 10848:94 11164:105 11631:106 11686:302 11775:53 11871:6 12448:109 12610:303 12674:117 12916:142 12992:109 13165:170 13285:124 13529:93 13829:112 13846:146 14239:65 14289:99 14665:55 15174:19 15204:69 15948:202 16034:128 16804:71 16810:139 16974:145 17255:82 17487:238 17621:87 17961:68 17990:261 18364:126 18372:167 18397:88 18481:95 18916:111 18943:103 19109:114 19187:88 19309:103 19536:258 19685:202 19829:220 +30:126 626:292 1040:145 1052:91 1202:97 1316:94 1914:144 2407:16 2528:84 2934:140 3096:77 3361:117 4236:135 4573:42 4723:163 4957:175 5304:158 6310:67 6866:280 7055:129 7709:244 8342:12 8661:175 8693:190 8920:411 8954:127 9112:160 9460:76 9596:287 9711:53 9935:19 10244:306 10637:23 10717:110 10750:61 10872:228 11871:73 12170:269 12582:47 12595:270 12610:103 12629:127 12716:66 12916:80 13429:39 13881:65 14665:113 15020:306 15204:133 15387:141 15448:193 16325:197 16432:186 16726:80 16974:148 17487:122 17640:60 17880:104 19685:207 +151:23 162:107 171:47 484:118 516:32 520:54 559:80 634:100 636:56 732:28 926:32 1022:76 1052:10 1070:19 1282:89 1532:50 1620:64 1801:34 1984:80 2276:54 2351:34 2418:203 2792:81 2847:78 2990:76 3065:61 3292:34 3308:31 3578:28 3755:55 4077:47 4194:52 4308:65 4362:68 4526:39 4960:52 5094:34 5163:73 5227:146 5361:35 5434:93 5753:36 6043:114 6234:172 6374:15 6625:129 6652:36 6706:88 6866:131 7055:14 7097:60 7379:83 7494:71 7543:90 7709:279 7739:31 8374:26 8475:62 8548:52 8615:59 9043:86 9056:19 9565:89 9717:91 9731:55 9900:73 10035:42 10121:101 10270:305 10276:13 10530:46 10795:53 10799:93 10848:150 10875:34 10998:44 11085:60 11245:113 11303:89 11400:134 11588:22 11748:52 11871:185 11877:41 11999:643 12122:107 12161:22 12238:97 12319:47 12364:74 12448:82 12604:80 12616:42 12772:99 12778:253 12789:85 12798:63 12820:92 12916:5 12968:200 13006:16 13258:23 13388:53 13685:19 13709:70 13881:33 14119:80 14400:78 14556:15 14612:100 14749:22 14810:149 14871:98 15215:65 15276:83 15385:52 15430:21 15744:46 15777:72 15853:48 15869:22 16186:102 16211:59 16406:51 16584:15 16613:49 16697:39 16810:66 16881:20 16887:111 16929:85 16943:187 16995:19 17000:77 17069:84 17170:122 17330:91 17430:64 17841:34 18306:81 18325:100 18622:25 18813:73 19210:120 19386:55 19443:161 19455:22 19636:33 19794:43 19935:49 
+247:25 732:20 743:68 1040:7 1052:27 1715:26 1889:10 1951:26 1981:54 2530:38 2560:76 2630:32 2720:89 2751:165 2820:85 3237:33 3307:221 3565:79 3578:18 3860:28 3988:31 4737:68 5128:21 5379:87 5428:45 5473:75 5730:595 5934:35 6126:55 6335:65 6424:68 6428:87 6449:29 6757:74 6866:254 6917:29 6927:56 6994:48 7097:42 7248:36 7366:49 7494:186 7560:33 7709:188 7729:27 7850:26 7945:59 8099:24 8349:38 8420:77 8515:97 8548:37 8710:141 8789:78 8804:234 9221:31 9365:70 10154:40 10430:41 10535:31 10998:31 11126:119 11234:71 11487:37 11519:84 11871:265 12017:78 12599:113 12610:15 12798:72 12829:48 12916:6 13191:44 13384:139 13435:39 13529:78 13565:680 13891:36 13929:12 14081:138 14146:36 14161:21 14844:30 15045:67 15376:57 15493:114 15586:62 15780:16 15798:54 16028:46 16339:56 16450:172 16584:22 16609:43 16724:120 16739:74 16810:54 16947:120 17173:20 17278:74 17531:44 17571:35 18010:31 18314:32 18364:15 18493:55 18734:36 18860:107 18912:42 19117:33 19261:172 19384:41 19386:13 19487:66 19774:100 19809:27 19910:38 19935:69 +247:54 797:84 930:313 1037:65 1040:31 1052:29 1111:116 1156:92 1978:145 2269:38 2415:111 2446:77 2751:59 2990:84 3097:168 3136:102 3425:81 3578:15 3713:72 3976:97 4097:57 4554:55 4614:82 4646:82 5058:122 5305:54 5524:117 5574:118 5670:77 5717:117 6306:111 6822:73 6865:55 6866:159 7311:294 7494:320 7560:70 7666:239 7709:58 7908:40 8148:254 8420:116 8515:143 8705:35 10254:24 10390:134 10443:86 10821:15 11224:56 11822:44 11871:151 12238:101 12448:35 12582:59 12605:89 12607:54 12610:33 12720:86 12749:90 12916:14 13560:81 13631:93 13685:56 13691:133 13829:146 13891:76 13929:52 14530:109 15294:49 15646:135 15780:35 16044:101 16081:66 16154:142 16475:147 16478:46 16810:71 16817:87 17017:70 17036:53 17393:645 17438:67 17560:131 18115:145 18364:33 18894:68 19455:67 +520:147 860:86 904:62 1071:132 1188:221 1273:102 1889:220 2062:131 2194:81 2449:111 2674:58 2990:65 3045:123 3272:101 3620:49 4238:34 4399:50 4590:627 4637:68 5304:88 5378:83 5417:21 5507:158 5759:49 5871:148 5883:50 6234:110 6347:82 6866:485 7091:135 7356:188 7494:96 7709:399 7848:57 8010:54 8078:43 8173:81 8333:231 8488:108 8507:83 8925:61 9511:69 9792:38 10161:96 10187:35 10241:113 10443:88 10457:100 10472:255 10799:48 11106:69 11206:149 11322:75 12124:151 12616:175 12916:90 13013:36 13529:81 13829:76 13881:134 14045:140 14161:253 14313:34 14733:245 14848:100 14973:272 15115:158 15772:81 16676:79 16795:158 17242:38 17745:104 18157:47 18952:106 19027:63 19041:93 19247:63 19255:147 19926:150 +579:120 626:105 1914:156 2309:292 2528:108 2707:356 2720:93 3051:104 3213:37 3361:127 3904:91 4021:81 4283:128 4473:86 4508:108 5946:93 6225:112 6276:129 6298:76 6577:137 6705:35 6866:410 6984:117 7494:90 7709:190 8078:231 8239:111 8419:79 8915:108 10244:220 10848:74 11685:2 11871:74 11923:113 11971:259 12238:248 12582:92 12595:291 12902:29 12916:107 12992:242 13529:95 13881:66 14168:130 14194:65 14239:289 14258:93 14665:122 15217:105 15264:52 15359:98 15505:88 15785:48 16029:102 16302:120 16810:127 17083:200 17247:68 17487:262 17711:218 18157:137 18364:56 18493:200 18869:61 18906:56 19255:131 19278:134 19480:113 19685:334 +239:102 413:99 605:81 615:29 659:29 746:91 1040:18 1070:23 1501:54 1549:23 1597:117 1622:45 2282:55 2398:231 2528:114 2630:80 2783:39 2894:36 2934:241 2990:82 3012:96 3237:56 3586:32 3664:29 4094:54 4482:54 4674:40 4770:47 5041:64 5071:69 5367:34 5427:54 5446:95 5625:33 5981:47 6044:51 6234:48 6274:23 6404:40 6602:25 6642:25 6866:234 7080:140 7109:50 7191:41 7448:150 7494:51 7510:22 7709:184 7729:22 7841:96 7959:48 8099:21 
8214:93 8350:87 8488:70 8794:198 8961:121 8963:76 8996:818 9109:92 9152:62 9221:155 9250:56 9593:95 9987:17 9989:24 10229:31 10241:42 10394:54 10457:8 10483:136 10620:22 10640:32 10693:57 10799:15 10848:9 10875:40 11041:41 11290:95 11322:70 11577:49 11588:26 12798:149 12815:38 12916:33 13043:45 13177:66 13384:81 14015:105 14179:44 14222:24 14290:74 14375:22 14392:49 14404:58 14583:163 14733:51 14809:65 14990:62 15378:71 15388:105 15424:43 15488:112 15894:129 16034:81 16069:155 16223:85 16495:58 16637:75 16638:33 16810:5 17039:29 17363:104 17789:92 18115:75 18121:34 18323:35 18364:32 18370:92 18372:77 18589:74 18688:51 18696:43 19109:89 19131:81 19255:69 19327:17 19710:101 19889:26 19926:67 +626:109 1040:190 1057:88 1312:41 1853:156 1896:54 1914:81 1968:140 2446:22 2522:133 2737:126 3051:74 3204:110 3456:74 4545:71 4638:47 4716:105 4907:119 5169:41 5519:71 5561:118 5577:143 6112:70 6138:48 6234:137 6444:107 6577:179 6866:402 6905:80 7709:487 8050:80 8211:120 8789:130 8985:107 9596:161 10244:457 10457:69 10532:158 10735:76 11330:96 11398:84 11822:79 11929:25 12170:453 12358:100 12698:98 12916:122 13165:98 13324:9 13485:47 13929:46 14239:75 14662:397 14665:63 14682:54 16472:297 16810:107 17487:137 17547:154 17621:100 18323:95 18364:174 18397:27 18800:63 18944:90 19144:76 19494:67 19685:232 19752:91 +219:65 377:562 984:112 989:86 1052:55 1436:130 1599:750 2407:121 2592:81 2720:90 2929:93 2934:136 2989:96 2990:200 3337:46 3515:30 3578:19 3614:92 3989:126 4510:84 4539:73 4572:143 4576:37 4614:26 4737:90 5627:48 5743:56 5812:101 6066:33 6234:67 6374:29 6434:91 6866:43 6927:77 7400:85 7483:92 7638:70 7709:46 8061:54 8320:31 8363:98 8759:36 9109:55 9126:58 9143:130 9937:151 10532:293 10779:92 10799:110 11085:105 11106:164 11164:50 11621:93 11668:37 11698:122 11871:42 11989:120 12238:70 12320:81 12704:157 12916:1 12968:177 13378:170 13685:36 13854:103 13881:24 13929:33 14161:14 14227:123 14247:47 14585:48 14729:117 14851:28 15492:90 15798:37 15853:46 16034:57 16662:50 17369:50 17573:63 18323:24 19255:157 19327:27 19443:77 19834:46 19889:84 19921:26 +87:294 96:169 203:115 695:44 1070:33 1244:29 1299:83 1481:43 1717:213 1738:66 1981:108 1990:32 2118:99 2134:146 2186:51 2195:66 2500:58 2528:104 2877:85 2934:157 2977:56 3187:32 3361:96 3433:47 3515:26 3578:13 3826:593 3866:122 3892:114 3896:41 4259:44 4273:48 4382:58 4460:47 4614:47 4711:45 4957:97 5057:26 5305:31 5426:68 5596:40 6065:39 6199:62 6234:249 6662:33 6866:177 7055:47 7435:42 7494:123 7640:43 7709:144 7844:50 7893:38 8006:63 8306:46 8540:54 8612:59 8674:131 8707:42 8719:80 8737:33 8748:46 8804:83 9332:62 9424:34 9443:63 9614:132 9644:70 9757:39 9911:79 10001:55 10035:60 10218:195 10219:108 10238:171 10276:22 10277:56 10357:96 10377:29 10443:74 10457:22 10735:24 10744:34 10753:114 10925:36 11074:47 11106:31 11168:36 11342:53 11752:60 11971:43 12704:39 12724:81 12916:4 13143:116 13165:157 13288:131 13384:87 13514:58 13570:50 13688:48 13738:42 13757:124 13853:56 13881:4 14161:38 14325:64 14404:56 14507:31 14690:1 14731:93 14947:106 15204:155 15292:122 15303:64 15355:109 15433:113 15446:51 15448:64 15492:27 15780:59 15885:27 16034:127 16225:316 16406:44 16584:26 17173:24 17369:179 17374:47 17753:49 18020:70 18115:12 18141:75 18153:59 18155:105 18200:80 18282:157 18599:97 18622:42 19041:41 19174:61 19194:19 19210:77 19349:118 19429:46 19783:59 19787:285 19812:49 19926:115 19962:78 19973:34 diff --git a/inst/dev/datasets/svm/dexter_train.labels b/inst/dev/datasets/svm/dexter_train.labels new file mode 100644 index 00000000..83134a3e --- /dev/null 
+++ b/inst/dev/datasets/svm/dexter_train.labels @@ -0,0 +1,300 @@ +1 +-1 +1 +-1 +1 +-1 +1 +-1 +1 +1 +1 +1 +-1 +1 +1 +1 +-1 +1 +-1 +-1 +1 +-1 +1 +1 +1 +1 +1 +-1 +-1 +-1 +1 +-1 +-1 +1 +1 +1 +1 +-1 +1 +-1 +-1 +-1 +-1 +1 +-1 +-1 +-1 +-1 +-1 +1 +-1 +-1 +1 +-1 +-1 +-1 +1 +1 +1 +1 +1 +-1 +-1 +-1 +-1 +-1 +1 +-1 +1 +-1 +1 +-1 +-1 +-1 +1 +1 +1 +1 +1 +-1 +-1 +-1 +-1 +-1 +1 +1 +1 +1 +-1 +-1 +-1 +-1 +1 +-1 +1 +-1 +-1 +1 +1 +-1 +1 +1 +-1 +-1 +1 +1 +1 +1 +-1 +-1 +-1 +1 +1 +-1 +1 +1 +-1 +-1 +1 +1 +-1 +1 +-1 +-1 +1 +1 +1 +-1 +-1 +1 +1 +1 +-1 +-1 +1 +1 +-1 +-1 +1 +-1 +1 +1 +1 +-1 +-1 +-1 +1 +1 +-1 +-1 +1 +-1 +1 +-1 +1 +-1 +-1 +1 +1 +-1 +1 +-1 +1 +-1 +-1 +1 +-1 +1 +1 +-1 +1 +-1 +1 +-1 +-1 +-1 +1 +-1 +1 +1 +1 +1 +-1 +-1 +1 +-1 +1 +1 +1 +-1 +-1 +1 +-1 +-1 +1 +-1 +-1 +-1 +1 +-1 +-1 +1 +1 +-1 +1 +-1 +1 +1 +-1 +1 +1 +-1 +-1 +-1 +1 +-1 +-1 +1 +1 +-1 +1 +-1 +-1 +-1 +-1 +1 +1 +1 +1 +1 +1 +1 +-1 +-1 +1 +-1 +-1 +1 +1 +-1 +1 +1 +-1 +-1 +-1 +1 +1 +1 +-1 +1 +1 +-1 +1 +-1 +-1 +-1 +-1 +1 +-1 +1 +1 +-1 +-1 +1 +1 +-1 +-1 +1 +1 +1 +-1 +-1 +-1 +-1 +1 +1 +1 +1 +1 +-1 +-1 +1 +1 +-1 +-1 +1 +1 +-1 +1 +-1 +-1 +1 +1 +1 +-1 +-1 +-1 +-1 +1 +-1
diff --git a/inst/dev/datasets/svm/download_large.sh b/inst/dev/datasets/svm/download_large.sh new file mode 100755 index 00000000..dde6872c --- /dev/null +++ b/inst/dev/datasets/svm/download_large.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# Recreate the target directory from scratch. +rm -r -f large +mkdir large + +# Fetch the news20 sample used by the SVM tests. +wget http://gmum.net/files/gmum.r/data/news20_part.RData +mv news20_part.RData large + 
diff --git a/inst/dev/datasets/svm/download_transduction.sh b/inst/dev/datasets/svm/download_transduction.sh new file mode 100755 index 00000000..1f1f56ff --- /dev/null +++ b/inst/dev/datasets/svm/download_transduction.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# Fetch and unpack the SVMlight transduction example. +wget http://download.joachims.org/svm_light/examples/example2.tar.gz +gunzip example2.tar.gz +tar -xvf example2.tar + +# Move the unpacked files into ./transductive and remove the archive. +rm -r -f transductive +mkdir transductive +mv example2/* transductive +rm -r -f example2 +rm example2.tar +# Drop the first line from each data file. +sed '1d' transductive/train_transduction.dat > tmp +mv tmp transductive/train_transduction.dat +sed '1d' transductive/train_induction.dat > tmp +mv tmp transductive/train_induction.dat +sed '1d' transductive/test.dat > tmp +mv tmp transductive/test.dat
diff --git a/inst/dev/datasets/svm/liver-disorders b/inst/dev/datasets/svm/liver-disorders new file mode 100644 index 00000000..3c537c27 --- /dev/null +++ b/inst/dev/datasets/svm/liver-disorders @@ -0,0 +1,345 @@ +1 1:85.000000 2:92.000000 3:45.000000 4:27.000000 5:31.000000 6:0.000000 +2 1:85.000000 2:64.000000 3:59.000000 4:32.000000 5:23.000000 6:0.000000 +2 1:86.000000 2:54.000000 3:33.000000 4:16.000000 5:54.000000 6:0.000000 +2 1:91.000000 2:78.000000 3:34.000000 4:24.000000 5:36.000000 6:0.000000 +2 1:87.000000 2:70.000000 3:12.000000 4:28.000000 5:10.000000 6:0.000000 +2 1:98.000000 2:55.000000 3:13.000000 4:17.000000 5:17.000000 6:0.000000 +1 1:88.000000 2:62.000000 3:20.000000 4:17.000000 5:9.000000 6:0.500000 +1 1:88.000000 2:67.000000 3:21.000000 4:11.000000 5:11.000000 6:0.500000 +1 1:92.000000 2:54.000000 3:22.000000 4:20.000000 5:7.000000 6:0.500000 +1 1:90.000000 2:60.000000 3:25.000000 4:19.000000 5:5.000000 6:0.500000 +1 1:89.000000 2:52.000000 3:13.000000 4:24.000000 5:15.000000 6:0.500000 +1 1:82.000000 2:62.000000 3:17.000000 4:17.000000 5:15.000000 6:0.500000 +1 1:90.000000 2:64.000000 3:61.000000 4:32.000000 5:13.000000 6:0.500000 +1 1:86.000000 2:77.000000 3:25.000000 4:19.000000 5:18.000000 6:0.500000 +1 1:96.000000 2:67.000000 3:29.000000 4:20.000000 5:11.000000 6:0.500000 +1 1:91.000000 2:78.000000
3:20.000000 4:31.000000 5:18.000000 6:0.500000 +1 1:89.000000 2:67.000000 3:23.000000 4:16.000000 5:10.000000 6:0.500000 +1 1:89.000000 2:79.000000 3:17.000000 4:17.000000 5:16.000000 6:0.500000 +1 1:91.000000 2:107.000000 3:20.000000 4:20.000000 5:56.000000 6:0.500000 +1 1:94.000000 2:116.000000 3:11.000000 4:33.000000 5:11.000000 6:0.500000 +1 1:92.000000 2:59.000000 3:35.000000 4:13.000000 5:19.000000 6:0.500000 +1 1:93.000000 2:23.000000 3:35.000000 4:20.000000 5:20.000000 6:0.500000 +1 1:90.000000 2:60.000000 3:23.000000 4:27.000000 5:5.000000 6:0.500000 +1 1:96.000000 2:68.000000 3:18.000000 4:19.000000 5:19.000000 6:0.500000 +1 1:84.000000 2:80.000000 3:47.000000 4:33.000000 5:97.000000 6:0.500000 +1 1:92.000000 2:70.000000 3:24.000000 4:13.000000 5:26.000000 6:0.500000 +1 1:90.000000 2:47.000000 3:28.000000 4:15.000000 5:18.000000 6:0.500000 +1 1:88.000000 2:66.000000 3:20.000000 4:21.000000 5:10.000000 6:0.500000 +1 1:91.000000 2:102.000000 3:17.000000 4:13.000000 5:19.000000 6:0.500000 +1 1:87.000000 2:41.000000 3:31.000000 4:19.000000 5:16.000000 6:0.500000 +1 1:86.000000 2:79.000000 3:28.000000 4:16.000000 5:17.000000 6:0.500000 +1 1:91.000000 2:57.000000 3:31.000000 4:23.000000 5:42.000000 6:0.500000 +1 1:93.000000 2:77.000000 3:32.000000 4:18.000000 5:29.000000 6:0.500000 +1 1:88.000000 2:96.000000 3:28.000000 4:21.000000 5:40.000000 6:0.500000 +1 1:94.000000 2:65.000000 3:22.000000 4:18.000000 5:11.000000 6:0.500000 +2 1:91.000000 2:72.000000 3:155.000000 4:68.000000 5:82.000000 6:0.500000 +2 1:85.000000 2:54.000000 3:47.000000 4:33.000000 5:22.000000 6:0.500000 +2 1:79.000000 2:39.000000 3:14.000000 4:19.000000 5:9.000000 6:0.500000 +2 1:85.000000 2:85.000000 3:25.000000 4:26.000000 5:30.000000 6:0.500000 +2 1:89.000000 2:63.000000 3:24.000000 4:20.000000 5:38.000000 6:0.500000 +2 1:84.000000 2:92.000000 3:68.000000 4:37.000000 5:44.000000 6:0.500000 +2 1:89.000000 2:68.000000 3:26.000000 4:39.000000 5:42.000000 6:0.500000 +2 1:89.000000 2:101.000000 3:18.000000 4:25.000000 5:13.000000 6:0.500000 +2 1:86.000000 2:84.000000 3:18.000000 4:14.000000 5:16.000000 6:0.500000 +2 1:85.000000 2:65.000000 3:25.000000 4:14.000000 5:18.000000 6:0.500000 +2 1:88.000000 2:61.000000 3:19.000000 4:21.000000 5:13.000000 6:0.500000 +2 1:92.000000 2:56.000000 3:14.000000 4:16.000000 5:10.000000 6:0.500000 +2 1:95.000000 2:50.000000 3:29.000000 4:25.000000 5:50.000000 6:0.500000 +2 1:91.000000 2:75.000000 3:24.000000 4:22.000000 5:11.000000 6:0.500000 +2 1:83.000000 2:40.000000 3:29.000000 4:25.000000 5:38.000000 6:0.500000 +2 1:89.000000 2:74.000000 3:19.000000 4:23.000000 5:16.000000 6:0.500000 +2 1:85.000000 2:64.000000 3:24.000000 4:22.000000 5:11.000000 6:0.500000 +2 1:92.000000 2:57.000000 3:64.000000 4:36.000000 5:90.000000 6:0.500000 +2 1:94.000000 2:48.000000 3:11.000000 4:23.000000 5:43.000000 6:0.500000 +2 1:87.000000 2:52.000000 3:21.000000 4:19.000000 5:30.000000 6:0.500000 +2 1:85.000000 2:65.000000 3:23.000000 4:29.000000 5:15.000000 6:0.500000 +2 1:84.000000 2:82.000000 3:21.000000 4:21.000000 5:19.000000 6:0.500000 +2 1:88.000000 2:49.000000 3:20.000000 4:22.000000 5:19.000000 6:0.500000 +2 1:96.000000 2:67.000000 3:26.000000 4:26.000000 5:36.000000 6:0.500000 +2 1:90.000000 2:63.000000 3:24.000000 4:24.000000 5:24.000000 6:0.500000 +2 1:90.000000 2:45.000000 3:33.000000 4:34.000000 5:27.000000 6:0.500000 +2 1:90.000000 2:72.000000 3:14.000000 4:15.000000 5:18.000000 6:0.500000 +2 1:91.000000 2:55.000000 3:4.000000 4:8.000000 5:13.000000 6:0.500000 +2 1:91.000000 2:52.000000 
3:15.000000 4:22.000000 5:11.000000 6:0.500000 +1 1:87.000000 2:71.000000 3:32.000000 4:19.000000 5:27.000000 6:1.000000 +1 1:89.000000 2:77.000000 3:26.000000 4:20.000000 5:19.000000 6:1.000000 +2 1:89.000000 2:67.000000 3:5.000000 4:17.000000 5:14.000000 6:1.000000 +2 1:85.000000 2:51.000000 3:26.000000 4:24.000000 5:23.000000 6:1.000000 +2 1:103.000000 2:75.000000 3:19.000000 4:30.000000 5:13.000000 6:1.000000 +2 1:90.000000 2:63.000000 3:16.000000 4:21.000000 5:14.000000 6:1.000000 +1 1:90.000000 2:63.000000 3:29.000000 4:23.000000 5:57.000000 6:2.000000 +1 1:90.000000 2:67.000000 3:35.000000 4:19.000000 5:35.000000 6:2.000000 +1 1:87.000000 2:66.000000 3:27.000000 4:22.000000 5:9.000000 6:2.000000 +1 1:90.000000 2:73.000000 3:34.000000 4:21.000000 5:22.000000 6:2.000000 +1 1:86.000000 2:54.000000 3:20.000000 4:21.000000 5:16.000000 6:2.000000 +1 1:90.000000 2:80.000000 3:19.000000 4:14.000000 5:42.000000 6:2.000000 +2 1:87.000000 2:90.000000 3:43.000000 4:28.000000 5:156.000000 6:2.000000 +2 1:96.000000 2:72.000000 3:28.000000 4:19.000000 5:30.000000 6:2.000000 +2 1:91.000000 2:55.000000 3:9.000000 4:25.000000 5:16.000000 6:2.000000 +2 1:95.000000 2:78.000000 3:27.000000 4:25.000000 5:30.000000 6:2.000000 +2 1:92.000000 2:101.000000 3:34.000000 4:30.000000 5:64.000000 6:2.000000 +2 1:89.000000 2:51.000000 3:41.000000 4:22.000000 5:48.000000 6:2.000000 +2 1:91.000000 2:99.000000 3:42.000000 4:33.000000 5:16.000000 6:2.000000 +2 1:94.000000 2:58.000000 3:21.000000 4:18.000000 5:26.000000 6:2.000000 +2 1:92.000000 2:60.000000 3:30.000000 4:27.000000 5:297.000000 6:2.000000 +2 1:94.000000 2:58.000000 3:21.000000 4:18.000000 5:26.000000 6:2.000000 +2 1:88.000000 2:47.000000 3:33.000000 4:26.000000 5:29.000000 6:2.000000 +2 1:92.000000 2:65.000000 3:17.000000 4:25.000000 5:9.000000 6:2.000000 +1 1:92.000000 2:79.000000 3:22.000000 4:20.000000 5:11.000000 6:3.000000 +1 1:84.000000 2:83.000000 3:20.000000 4:25.000000 5:7.000000 6:3.000000 +1 1:88.000000 2:68.000000 3:27.000000 4:21.000000 5:26.000000 6:3.000000 +1 1:86.000000 2:48.000000 3:20.000000 4:20.000000 5:6.000000 6:3.000000 +1 1:99.000000 2:69.000000 3:45.000000 4:32.000000 5:30.000000 6:3.000000 +1 1:88.000000 2:66.000000 3:23.000000 4:12.000000 5:15.000000 6:3.000000 +1 1:89.000000 2:62.000000 3:42.000000 4:30.000000 5:20.000000 6:3.000000 +1 1:90.000000 2:51.000000 3:23.000000 4:17.000000 5:27.000000 6:3.000000 +2 1:81.000000 2:61.000000 3:32.000000 4:37.000000 5:53.000000 6:3.000000 +2 1:89.000000 2:89.000000 3:23.000000 4:18.000000 5:104.000000 6:3.000000 +2 1:89.000000 2:65.000000 3:26.000000 4:18.000000 5:36.000000 6:3.000000 +2 1:92.000000 2:75.000000 3:26.000000 4:26.000000 5:24.000000 6:3.000000 +2 1:85.000000 2:59.000000 3:25.000000 4:20.000000 5:25.000000 6:3.000000 +2 1:92.000000 2:61.000000 3:18.000000 4:13.000000 5:81.000000 6:3.000000 +1 1:89.000000 2:63.000000 3:22.000000 4:27.000000 5:10.000000 6:4.000000 +1 1:90.000000 2:84.000000 3:18.000000 4:23.000000 5:13.000000 6:4.000000 +1 1:88.000000 2:95.000000 3:25.000000 4:19.000000 5:14.000000 6:4.000000 +1 1:89.000000 2:35.000000 3:27.000000 4:29.000000 5:17.000000 6:4.000000 +1 1:91.000000 2:80.000000 3:37.000000 4:23.000000 5:27.000000 6:4.000000 +1 1:91.000000 2:109.000000 3:33.000000 4:15.000000 5:18.000000 6:4.000000 +1 1:91.000000 2:65.000000 3:17.000000 4:5.000000 5:7.000000 6:4.000000 +2 1:88.000000 2:107.000000 3:29.000000 4:20.000000 5:50.000000 6:4.000000 +2 1:87.000000 2:76.000000 3:22.000000 4:55.000000 5:9.000000 6:4.000000 +2 1:87.000000 2:86.000000 
3:28.000000 4:23.000000 5:21.000000 6:4.000000 +2 1:87.000000 2:42.000000 3:26.000000 4:23.000000 5:17.000000 6:4.000000 +2 1:88.000000 2:80.000000 3:24.000000 4:25.000000 5:17.000000 6:4.000000 +2 1:90.000000 2:96.000000 3:34.000000 4:49.000000 5:169.000000 6:4.000000 +2 1:86.000000 2:67.000000 3:11.000000 4:15.000000 5:8.000000 6:4.000000 +2 1:92.000000 2:40.000000 3:19.000000 4:20.000000 5:21.000000 6:4.000000 +2 1:85.000000 2:60.000000 3:17.000000 4:21.000000 5:14.000000 6:4.000000 +2 1:89.000000 2:90.000000 3:15.000000 4:17.000000 5:25.000000 6:4.000000 +2 1:91.000000 2:57.000000 3:15.000000 4:16.000000 5:16.000000 6:4.000000 +2 1:96.000000 2:55.000000 3:48.000000 4:39.000000 5:42.000000 6:4.000000 +2 1:79.000000 2:101.000000 3:17.000000 4:27.000000 5:23.000000 6:4.000000 +2 1:90.000000 2:134.000000 3:14.000000 4:20.000000 5:14.000000 6:4.000000 +2 1:89.000000 2:76.000000 3:14.000000 4:21.000000 5:24.000000 6:4.000000 +2 1:88.000000 2:93.000000 3:29.000000 4:27.000000 5:31.000000 6:4.000000 +2 1:90.000000 2:67.000000 3:10.000000 4:16.000000 5:16.000000 6:4.000000 +2 1:92.000000 2:73.000000 3:24.000000 4:21.000000 5:48.000000 6:4.000000 +2 1:91.000000 2:55.000000 3:28.000000 4:28.000000 5:82.000000 6:4.000000 +2 1:83.000000 2:45.000000 3:19.000000 4:21.000000 5:13.000000 6:4.000000 +2 1:90.000000 2:74.000000 3:19.000000 4:14.000000 5:22.000000 6:4.000000 +1 1:92.000000 2:66.000000 3:21.000000 4:16.000000 5:33.000000 6:5.000000 +1 1:93.000000 2:63.000000 3:26.000000 4:18.000000 5:18.000000 6:5.000000 +2 1:86.000000 2:78.000000 3:47.000000 4:39.000000 5:107.000000 6:5.000000 +2 1:97.000000 2:44.000000 3:113.000000 4:45.000000 5:150.000000 6:5.000000 +2 1:87.000000 2:59.000000 3:15.000000 4:19.000000 5:12.000000 6:5.000000 +2 1:86.000000 2:44.000000 3:21.000000 4:11.000000 5:15.000000 6:5.000000 +2 1:87.000000 2:64.000000 3:16.000000 4:20.000000 5:24.000000 6:5.000000 +2 1:92.000000 2:57.000000 3:21.000000 4:23.000000 5:22.000000 6:5.000000 +2 1:90.000000 2:70.000000 3:25.000000 4:23.000000 5:112.000000 6:5.000000 +2 1:99.000000 2:59.000000 3:17.000000 4:19.000000 5:11.000000 6:5.000000 +1 1:92.000000 2:80.000000 3:10.000000 4:26.000000 5:20.000000 6:6.000000 +1 1:95.000000 2:60.000000 3:26.000000 4:22.000000 5:28.000000 6:6.000000 +1 1:91.000000 2:63.000000 3:25.000000 4:26.000000 5:15.000000 6:6.000000 +1 1:92.000000 2:62.000000 3:37.000000 4:21.000000 5:36.000000 6:6.000000 +1 1:95.000000 2:50.000000 3:13.000000 4:14.000000 5:15.000000 6:6.000000 +1 1:90.000000 2:76.000000 3:37.000000 4:19.000000 5:50.000000 6:6.000000 +1 1:96.000000 2:70.000000 3:70.000000 4:26.000000 5:36.000000 6:6.000000 +1 1:95.000000 2:62.000000 3:64.000000 4:42.000000 5:76.000000 6:6.000000 +1 1:92.000000 2:62.000000 3:20.000000 4:23.000000 5:20.000000 6:6.000000 +1 1:91.000000 2:63.000000 3:25.000000 4:26.000000 5:15.000000 6:6.000000 +2 1:82.000000 2:56.000000 3:67.000000 4:38.000000 5:92.000000 6:6.000000 +2 1:92.000000 2:82.000000 3:27.000000 4:24.000000 5:37.000000 6:6.000000 +2 1:90.000000 2:63.000000 3:12.000000 4:26.000000 5:21.000000 6:6.000000 +2 1:88.000000 2:37.000000 3:9.000000 4:15.000000 5:16.000000 6:6.000000 +2 1:100.000000 2:60.000000 3:29.000000 4:23.000000 5:76.000000 6:6.000000 +2 1:98.000000 2:43.000000 3:35.000000 4:23.000000 5:69.000000 6:6.000000 +2 1:91.000000 2:74.000000 3:87.000000 4:50.000000 5:67.000000 6:6.000000 +2 1:92.000000 2:87.000000 3:57.000000 4:25.000000 5:44.000000 6:6.000000 +2 1:93.000000 2:99.000000 3:36.000000 4:34.000000 5:48.000000 6:6.000000 +2 1:90.000000 
2:72.000000 3:17.000000 4:19.000000 5:19.000000 6:6.000000 +2 1:97.000000 2:93.000000 3:21.000000 4:20.000000 5:68.000000 6:6.000000 +2 1:93.000000 2:50.000000 3:18.000000 4:25.000000 5:17.000000 6:6.000000 +2 1:90.000000 2:57.000000 3:20.000000 4:26.000000 5:33.000000 6:6.000000 +2 1:92.000000 2:76.000000 3:31.000000 4:28.000000 5:41.000000 6:6.000000 +2 1:88.000000 2:55.000000 3:19.000000 4:17.000000 5:14.000000 6:6.000000 +2 1:89.000000 2:63.000000 3:24.000000 4:29.000000 5:29.000000 6:6.000000 +1 1:92.000000 2:79.000000 3:70.000000 4:32.000000 5:84.000000 6:7.000000 +1 1:92.000000 2:93.000000 3:58.000000 4:35.000000 5:120.000000 6:7.000000 +2 1:93.000000 2:84.000000 3:58.000000 4:47.000000 5:62.000000 6:7.000000 +1 1:97.000000 2:71.000000 3:29.000000 4:22.000000 5:52.000000 6:8.000000 +1 1:84.000000 2:99.000000 3:33.000000 4:19.000000 5:26.000000 6:8.000000 +1 1:96.000000 2:44.000000 3:42.000000 4:23.000000 5:73.000000 6:8.000000 +1 1:90.000000 2:62.000000 3:22.000000 4:21.000000 5:21.000000 6:8.000000 +1 1:92.000000 2:94.000000 3:18.000000 4:17.000000 5:6.000000 6:8.000000 +1 1:90.000000 2:67.000000 3:77.000000 4:39.000000 5:114.000000 6:8.000000 +1 1:97.000000 2:71.000000 3:29.000000 4:22.000000 5:52.000000 6:8.000000 +2 1:91.000000 2:69.000000 3:25.000000 4:25.000000 5:66.000000 6:8.000000 +2 1:93.000000 2:59.000000 3:17.000000 4:20.000000 5:14.000000 6:8.000000 +2 1:92.000000 2:95.000000 3:85.000000 4:48.000000 5:200.000000 6:8.000000 +2 1:90.000000 2:50.000000 3:26.000000 4:22.000000 5:53.000000 6:8.000000 +2 1:91.000000 2:62.000000 3:59.000000 4:47.000000 5:60.000000 6:8.000000 +1 1:92.000000 2:93.000000 3:22.000000 4:28.000000 5:123.000000 6:9.000000 +1 1:92.000000 2:77.000000 3:86.000000 4:41.000000 5:31.000000 6:10.000000 +2 1:86.000000 2:66.000000 3:22.000000 4:24.000000 5:26.000000 6:10.000000 +2 1:98.000000 2:57.000000 3:31.000000 4:34.000000 5:73.000000 6:10.000000 +2 1:95.000000 2:80.000000 3:50.000000 4:64.000000 5:55.000000 6:10.000000 +2 1:92.000000 2:108.000000 3:53.000000 4:33.000000 5:94.000000 6:12.000000 +2 1:97.000000 2:92.000000 3:22.000000 4:28.000000 5:49.000000 6:12.000000 +1 1:93.000000 2:77.000000 3:39.000000 4:37.000000 5:108.000000 6:16.000000 +1 1:94.000000 2:83.000000 3:81.000000 4:34.000000 5:201.000000 6:20.000000 +1 1:87.000000 2:75.000000 3:25.000000 4:21.000000 5:14.000000 6:0.000000 +1 1:88.000000 2:56.000000 3:23.000000 4:18.000000 5:12.000000 6:0.000000 +2 1:84.000000 2:97.000000 3:41.000000 4:20.000000 5:32.000000 6:0.000000 +1 1:94.000000 2:91.000000 3:27.000000 4:20.000000 5:15.000000 6:0.500000 +1 1:97.000000 2:62.000000 3:17.000000 4:13.000000 5:5.000000 6:0.500000 +1 1:92.000000 2:85.000000 3:25.000000 4:20.000000 5:12.000000 6:0.500000 +1 1:82.000000 2:48.000000 3:27.000000 4:15.000000 5:12.000000 6:0.500000 +1 1:88.000000 2:74.000000 3:31.000000 4:25.000000 5:15.000000 6:0.500000 +1 1:95.000000 2:77.000000 3:30.000000 4:14.000000 5:21.000000 6:0.500000 +1 1:88.000000 2:94.000000 3:26.000000 4:18.000000 5:8.000000 6:0.500000 +1 1:91.000000 2:70.000000 3:19.000000 4:19.000000 5:22.000000 6:0.500000 +1 1:83.000000 2:54.000000 3:27.000000 4:15.000000 5:12.000000 6:0.500000 +1 1:91.000000 2:105.000000 3:40.000000 4:26.000000 5:56.000000 6:0.500000 +1 1:86.000000 2:79.000000 3:37.000000 4:28.000000 5:14.000000 6:0.500000 +1 1:91.000000 2:96.000000 3:35.000000 4:22.000000 5:135.000000 6:0.500000 +1 1:89.000000 2:82.000000 3:23.000000 4:14.000000 5:35.000000 6:0.500000 +1 1:90.000000 2:73.000000 3:24.000000 4:23.000000 5:11.000000 6:0.500000 +1 
1:90.000000 2:87.000000 3:19.000000 4:25.000000 5:19.000000 6:0.500000 +1 1:89.000000 2:82.000000 3:33.000000 4:32.000000 5:18.000000 6:0.500000 +1 1:85.000000 2:79.000000 3:17.000000 4:8.000000 5:9.000000 6:0.500000 +1 1:85.000000 2:119.000000 3:30.000000 4:26.000000 5:17.000000 6:0.500000 +1 1:78.000000 2:69.000000 3:24.000000 4:18.000000 5:31.000000 6:0.500000 +1 1:88.000000 2:107.000000 3:34.000000 4:21.000000 5:27.000000 6:0.500000 +1 1:89.000000 2:115.000000 3:17.000000 4:27.000000 5:7.000000 6:0.500000 +1 1:92.000000 2:67.000000 3:23.000000 4:15.000000 5:12.000000 6:0.500000 +1 1:89.000000 2:101.000000 3:27.000000 4:34.000000 5:14.000000 6:0.500000 +1 1:91.000000 2:84.000000 3:11.000000 4:12.000000 5:10.000000 6:0.500000 +2 1:94.000000 2:101.000000 3:41.000000 4:20.000000 5:53.000000 6:0.500000 +2 1:88.000000 2:46.000000 3:29.000000 4:22.000000 5:18.000000 6:0.500000 +2 1:88.000000 2:122.000000 3:35.000000 4:29.000000 5:42.000000 6:0.500000 +2 1:84.000000 2:88.000000 3:28.000000 4:25.000000 5:35.000000 6:0.500000 +2 1:90.000000 2:79.000000 3:18.000000 4:15.000000 5:24.000000 6:0.500000 +2 1:87.000000 2:69.000000 3:22.000000 4:26.000000 5:11.000000 6:0.500000 +2 1:65.000000 2:63.000000 3:19.000000 4:20.000000 5:14.000000 6:0.500000 +2 1:90.000000 2:64.000000 3:12.000000 4:17.000000 5:14.000000 6:0.500000 +2 1:85.000000 2:58.000000 3:18.000000 4:24.000000 5:16.000000 6:0.500000 +2 1:88.000000 2:81.000000 3:41.000000 4:27.000000 5:36.000000 6:0.500000 +2 1:86.000000 2:78.000000 3:52.000000 4:29.000000 5:62.000000 6:0.500000 +2 1:82.000000 2:74.000000 3:38.000000 4:28.000000 5:48.000000 6:0.500000 +2 1:86.000000 2:58.000000 3:36.000000 4:27.000000 5:59.000000 6:0.500000 +2 1:94.000000 2:56.000000 3:30.000000 4:18.000000 5:27.000000 6:0.500000 +2 1:87.000000 2:57.000000 3:30.000000 4:30.000000 5:22.000000 6:0.500000 +2 1:98.000000 2:74.000000 3:148.000000 4:75.000000 5:159.000000 6:0.500000 +2 1:94.000000 2:75.000000 3:20.000000 4:25.000000 5:38.000000 6:0.500000 +2 1:83.000000 2:68.000000 3:17.000000 4:20.000000 5:71.000000 6:0.500000 +2 1:93.000000 2:56.000000 3:25.000000 4:21.000000 5:33.000000 6:0.500000 +2 1:101.000000 2:65.000000 3:18.000000 4:21.000000 5:22.000000 6:0.500000 +2 1:92.000000 2:65.000000 3:25.000000 4:20.000000 5:31.000000 6:0.500000 +2 1:92.000000 2:58.000000 3:14.000000 4:16.000000 5:13.000000 6:0.500000 +2 1:86.000000 2:58.000000 3:16.000000 4:23.000000 5:23.000000 6:0.500000 +2 1:85.000000 2:62.000000 3:15.000000 4:13.000000 5:22.000000 6:0.500000 +2 1:86.000000 2:57.000000 3:13.000000 4:20.000000 5:13.000000 6:0.500000 +2 1:86.000000 2:54.000000 3:26.000000 4:30.000000 5:13.000000 6:0.500000 +1 1:81.000000 2:41.000000 3:33.000000 4:27.000000 5:34.000000 6:1.000000 +1 1:91.000000 2:67.000000 3:32.000000 4:26.000000 5:13.000000 6:1.000000 +1 1:91.000000 2:80.000000 3:21.000000 4:19.000000 5:14.000000 6:1.000000 +1 1:92.000000 2:60.000000 3:23.000000 4:15.000000 5:19.000000 6:1.000000 +1 1:91.000000 2:60.000000 3:32.000000 4:14.000000 5:8.000000 6:1.000000 +1 1:93.000000 2:65.000000 3:28.000000 4:22.000000 5:10.000000 6:1.000000 +2 1:90.000000 2:63.000000 3:45.000000 4:24.000000 5:85.000000 6:1.000000 +2 1:87.000000 2:92.000000 3:21.000000 4:22.000000 5:37.000000 6:1.000000 +2 1:83.000000 2:78.000000 3:31.000000 4:19.000000 5:115.000000 6:1.000000 +2 1:95.000000 2:62.000000 3:24.000000 4:23.000000 5:14.000000 6:1.000000 +2 1:93.000000 2:59.000000 3:41.000000 4:30.000000 5:48.000000 6:1.000000 +1 1:84.000000 2:82.000000 3:43.000000 4:32.000000 5:38.000000 6:2.000000 
+1 1:87.000000 2:71.000000 3:33.000000 4:20.000000 5:22.000000 6:2.000000 +1 1:86.000000 2:44.000000 3:24.000000 4:15.000000 5:18.000000 6:2.000000 +1 1:86.000000 2:66.000000 3:28.000000 4:24.000000 5:21.000000 6:2.000000 +1 1:88.000000 2:58.000000 3:31.000000 4:17.000000 5:17.000000 6:2.000000 +1 1:90.000000 2:61.000000 3:28.000000 4:29.000000 5:31.000000 6:2.000000 +1 1:88.000000 2:69.000000 3:70.000000 4:24.000000 5:64.000000 6:2.000000 +1 1:93.000000 2:87.000000 3:18.000000 4:17.000000 5:26.000000 6:2.000000 +1 1:98.000000 2:58.000000 3:33.000000 4:21.000000 5:28.000000 6:2.000000 +2 1:91.000000 2:44.000000 3:18.000000 4:18.000000 5:23.000000 6:2.000000 +2 1:87.000000 2:75.000000 3:37.000000 4:19.000000 5:70.000000 6:2.000000 +2 1:94.000000 2:91.000000 3:30.000000 4:26.000000 5:25.000000 6:2.000000 +2 1:88.000000 2:85.000000 3:14.000000 4:15.000000 5:10.000000 6:2.000000 +2 1:89.000000 2:109.000000 3:26.000000 4:25.000000 5:27.000000 6:2.000000 +2 1:87.000000 2:59.000000 3:37.000000 4:27.000000 5:34.000000 6:2.000000 +2 1:93.000000 2:58.000000 3:20.000000 4:23.000000 5:18.000000 6:2.000000 +2 1:88.000000 2:57.000000 3:9.000000 4:15.000000 5:16.000000 6:2.000000 +1 1:94.000000 2:65.000000 3:38.000000 4:27.000000 5:17.000000 6:3.000000 +1 1:91.000000 2:71.000000 3:12.000000 4:22.000000 5:11.000000 6:3.000000 +1 1:90.000000 2:55.000000 3:20.000000 4:20.000000 5:16.000000 6:3.000000 +2 1:91.000000 2:64.000000 3:21.000000 4:17.000000 5:26.000000 6:3.000000 +2 1:88.000000 2:47.000000 3:35.000000 4:26.000000 5:33.000000 6:3.000000 +2 1:82.000000 2:72.000000 3:31.000000 4:20.000000 5:84.000000 6:3.000000 +2 1:85.000000 2:58.000000 3:83.000000 4:49.000000 5:51.000000 6:3.000000 +1 1:91.000000 2:54.000000 3:25.000000 4:22.000000 5:35.000000 6:4.000000 +2 1:98.000000 2:50.000000 3:27.000000 4:25.000000 5:53.000000 6:4.000000 +2 1:86.000000 2:62.000000 3:29.000000 4:21.000000 5:26.000000 6:4.000000 +2 1:89.000000 2:48.000000 3:32.000000 4:22.000000 5:14.000000 6:4.000000 +2 1:82.000000 2:68.000000 3:20.000000 4:22.000000 5:9.000000 6:4.000000 +2 1:83.000000 2:70.000000 3:17.000000 4:19.000000 5:23.000000 6:4.000000 +2 1:96.000000 2:70.000000 3:21.000000 4:26.000000 5:21.000000 6:4.000000 +2 1:94.000000 2:117.000000 3:77.000000 4:56.000000 5:52.000000 6:4.000000 +2 1:93.000000 2:45.000000 3:11.000000 4:14.000000 5:21.000000 6:4.000000 +2 1:93.000000 2:49.000000 3:27.000000 4:21.000000 5:29.000000 6:4.000000 +2 1:84.000000 2:73.000000 3:46.000000 4:32.000000 5:39.000000 6:4.000000 +2 1:91.000000 2:63.000000 3:17.000000 4:17.000000 5:46.000000 6:4.000000 +2 1:90.000000 2:57.000000 3:31.000000 4:18.000000 5:37.000000 6:4.000000 +2 1:87.000000 2:45.000000 3:19.000000 4:13.000000 5:16.000000 6:4.000000 +2 1:91.000000 2:68.000000 3:14.000000 4:20.000000 5:19.000000 6:4.000000 +2 1:86.000000 2:55.000000 3:29.000000 4:35.000000 5:108.000000 6:4.000000 +2 1:91.000000 2:86.000000 3:52.000000 4:47.000000 5:52.000000 6:4.000000 +2 1:88.000000 2:46.000000 3:15.000000 4:33.000000 5:55.000000 6:4.000000 +2 1:85.000000 2:52.000000 3:22.000000 4:23.000000 5:34.000000 6:4.000000 +2 1:89.000000 2:72.000000 3:33.000000 4:27.000000 5:55.000000 6:4.000000 +2 1:95.000000 2:59.000000 3:23.000000 4:18.000000 5:19.000000 6:4.000000 +2 1:94.000000 2:43.000000 3:154.000000 4:82.000000 5:121.000000 6:4.000000 +2 1:96.000000 2:56.000000 3:38.000000 4:26.000000 5:23.000000 6:5.000000 +2 1:90.000000 2:52.000000 3:10.000000 4:17.000000 5:12.000000 6:5.000000 +2 1:94.000000 2:45.000000 3:20.000000 4:16.000000 5:12.000000 6:5.000000 
+2 1:99.000000 2:42.000000 3:14.000000 4:21.000000 5:49.000000 6:5.000000 +2 1:93.000000 2:102.000000 3:47.000000 4:23.000000 5:37.000000 6:5.000000 +2 1:94.000000 2:71.000000 3:25.000000 4:26.000000 5:31.000000 6:5.000000 +2 1:92.000000 2:73.000000 3:33.000000 4:34.000000 5:115.000000 6:5.000000 +1 1:87.000000 2:54.000000 3:41.000000 4:29.000000 5:23.000000 6:6.000000 +1 1:92.000000 2:67.000000 3:15.000000 4:14.000000 5:14.000000 6:6.000000 +1 1:98.000000 2:101.000000 3:31.000000 4:26.000000 5:32.000000 6:6.000000 +1 1:92.000000 2:53.000000 3:51.000000 4:33.000000 5:92.000000 6:6.000000 +1 1:97.000000 2:94.000000 3:43.000000 4:43.000000 5:82.000000 6:6.000000 +1 1:93.000000 2:43.000000 3:11.000000 4:16.000000 5:54.000000 6:6.000000 +1 1:93.000000 2:68.000000 3:24.000000 4:18.000000 5:19.000000 6:6.000000 +1 1:95.000000 2:36.000000 3:38.000000 4:19.000000 5:15.000000 6:6.000000 +1 1:99.000000 2:86.000000 3:58.000000 4:42.000000 5:203.000000 6:6.000000 +1 1:98.000000 2:66.000000 3:103.000000 4:57.000000 5:114.000000 6:6.000000 +1 1:92.000000 2:80.000000 3:10.000000 4:26.000000 5:20.000000 6:6.000000 +2 1:96.000000 2:74.000000 3:27.000000 4:25.000000 5:43.000000 6:6.000000 +2 1:95.000000 2:93.000000 3:21.000000 4:27.000000 5:47.000000 6:6.000000 +2 1:86.000000 2:109.000000 3:16.000000 4:22.000000 5:28.000000 6:6.000000 +2 1:91.000000 2:46.000000 3:30.000000 4:24.000000 5:39.000000 6:7.000000 +2 1:102.000000 2:82.000000 3:34.000000 4:78.000000 5:203.000000 6:7.000000 +2 1:85.000000 2:50.000000 3:12.000000 4:18.000000 5:14.000000 6:7.000000 +1 1:91.000000 2:57.000000 3:33.000000 4:23.000000 5:12.000000 6:8.000000 +1 1:91.000000 2:52.000000 3:76.000000 4:32.000000 5:24.000000 6:8.000000 +1 1:93.000000 2:70.000000 3:46.000000 4:30.000000 5:33.000000 6:8.000000 +1 1:87.000000 2:55.000000 3:36.000000 4:19.000000 5:25.000000 6:8.000000 +1 1:98.000000 2:123.000000 3:28.000000 4:24.000000 5:31.000000 6:8.000000 +2 1:82.000000 2:55.000000 3:18.000000 4:23.000000 5:44.000000 6:8.000000 +2 1:95.000000 2:73.000000 3:20.000000 4:25.000000 5:225.000000 6:8.000000 +2 1:97.000000 2:80.000000 3:17.000000 4:20.000000 5:53.000000 6:8.000000 +2 1:100.000000 2:83.000000 3:25.000000 4:24.000000 5:28.000000 6:8.000000 +2 1:88.000000 2:91.000000 3:56.000000 4:35.000000 5:126.000000 6:9.000000 +1 1:91.000000 2:138.000000 3:45.000000 4:21.000000 5:48.000000 6:10.000000 +1 1:92.000000 2:41.000000 3:37.000000 4:22.000000 5:37.000000 6:10.000000 +2 1:86.000000 2:123.000000 3:20.000000 4:25.000000 5:23.000000 6:10.000000 +2 1:91.000000 2:93.000000 3:35.000000 4:34.000000 5:37.000000 6:10.000000 +2 1:87.000000 2:87.000000 3:15.000000 4:23.000000 5:11.000000 6:10.000000 +2 1:87.000000 2:56.000000 3:52.000000 4:43.000000 5:55.000000 6:10.000000 +1 1:99.000000 2:75.000000 3:26.000000 4:24.000000 5:41.000000 6:12.000000 +2 1:96.000000 2:69.000000 3:53.000000 4:43.000000 5:203.000000 6:12.000000 +1 1:98.000000 2:77.000000 3:55.000000 4:35.000000 5:89.000000 6:15.000000 +1 1:91.000000 2:68.000000 3:27.000000 4:26.000000 5:14.000000 6:16.000000 +1 1:98.000000 2:99.000000 3:57.000000 4:45.000000 5:65.000000 6:20.000000 diff --git a/inst/include/cec/algorithm.hpp b/inst/include/cec/algorithm.hpp new file mode 100644 index 00000000..77ca2d6f --- /dev/null +++ b/inst/include/cec/algorithm.hpp @@ -0,0 +1,68 @@ +#ifndef ALGORITHM_HPP +#define ALGORITHM_HPP + +#include +#include "cluster.hpp" + +namespace gmum { + +struct SingleResult { + int switched; + int nclusters; + double energy; + SingleResult() { + switched = 0; + nclusters = 0; 
+ energy = 0.0;
+ }
+ SingleResult(int switched, int nclusters, double energy) :
+ switched(switched), nclusters(nclusters), energy(energy) {
+ }
+};
+
+struct TotalResult {
+ int iterations;
+ std::list<unsigned int> nclusters;
+ // energy from all iterations of the algorithm
+ std::list<double> energy_history;
+ double energy;
+
+ TotalResult() :
+ iterations(0) {
+ energy = std::numeric_limits<double>::max();
+ }
+ void append(SingleResult result, bool log_nclusters, bool log_energy) {
+ ++iterations;
+ energy = result.energy;
+ if (log_nclusters) {
+ nclusters.push_back(result.nclusters);
+ }
+ if (log_energy) {
+ energy_history.push_back(result.energy);
+ }
+ }
+};
+
+class Algorithm {
+protected:
+ bool m_log_nclusters, m_log_energy;
+ int m_max_iter;
+public:
+ Algorithm(bool log_nclusters, bool log_energy, int max_iter) :
+ m_log_nclusters(log_nclusters), m_log_energy(log_energy), m_max_iter(
+ max_iter) {
+ }
+ virtual TotalResult loop(const arma::mat &points,
+ std::vector<unsigned int> &assignment, double kill_threshold,
+ std::vector<Cluster*> &clusters) = 0;
+ virtual SingleResult single_loop(const arma::mat &points,
+ std::vector<unsigned int> &assignment, double kill_threshold,
+ std::vector<Cluster*> &clusters) = 0;
+ virtual ~Algorithm() {
+ }
+
+};
+
+}
+
+#endif
diff --git a/inst/include/cec/assignment.hpp b/inst/include/cec/assignment.hpp
new file mode 100644
index 00000000..52b0d833
--- /dev/null
+++ b/inst/include/cec/assignment.hpp
@@ -0,0 +1,47 @@
+#ifndef ASSIGNMENT_HPP
+#define ASSIGNMENT_HPP
+
+#include "boost/foreach.hpp"
+#include <list>
+#include <vector>
+#include <armadillo>
+
+namespace gmum {
+
+/**
+ * Different methods of initializing the assignment inherit from this class.
+ * Use operator() to obtain a new assignment.
+ */
+class Assignment {
+protected:
+ const arma::mat &m_points;
+ const unsigned int m_nclusters;
+ int m_seed;
+public:
+ Assignment(const arma::mat &points, const unsigned int nclusters);
+ virtual void operator()(std::vector<unsigned int> &assignment) = 0;
+ virtual ~Assignment();
+ void set_seed(int seed);
+};
+
+/**
+ * @param centers ids of rows in points
+ */
+unsigned int find_nearest(unsigned int i,
+ const std::vector<unsigned int> &centers, const arma::mat &points);
+
+unsigned int find_nearest(unsigned int i,
+ const std::list<std::vector<double> > &centers,
+ const arma::mat &points);
+
+/**
+ * @param centers ids of rows in points
+ */
+void assign_points(std::vector<unsigned int> &assignment,
+ const std::vector<unsigned int> &centers, const arma::mat &points);
+
+void assign_points(std::vector<unsigned int> &assignment,
+ const std::list<std::vector<double> > &centers,
+ const arma::mat &points);
+}
+
+#endif
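[Editor's aside: a minimal sketch of how a new initialization strategy could plug into the Assignment interface above. RoundRobinAssignment is a hypothetical name, not part of the package, and the cyclic labeling rule is purely illustrative; the only contract is filling `assignment` with cluster ids in [0, m_nclusters). The header path is assumed to be on the include path.]

    #include <vector>
    #include <armadillo>
    #include "assignment.hpp"

    // Hypothetical initialization method: labels points cyclically.
    class RoundRobinAssignment: public gmum::Assignment {
    public:
        RoundRobinAssignment(const arma::mat &points, const unsigned int nclusters) :
                Assignment(points, nclusters) {
        }
        virtual void operator()(std::vector<unsigned int> &assignment) {
            assignment.resize(m_points.n_rows);
            for (unsigned int i = 0; i < m_points.n_rows; ++i) {
                assignment[i] = i % m_nclusters; // cluster id in [0, m_nclusters)
            }
        }
    };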
diff --git a/inst/include/cec/cec.hpp b/inst/include/cec/cec.hpp
new file mode 100644
index 00000000..b6cc5c64
--- /dev/null
+++ b/inst/include/cec/cec.hpp
@@ -0,0 +1,58 @@
+#ifndef CEC_HPP
+#define CEC_HPP
+
+#include <list>
+#include <vector>
+#include <armadillo>
+#include "boost/foreach.hpp"
+#include "boost/smart_ptr.hpp"
+#include "algorithm.hpp"
+#include "cluster.hpp"
+#include "cluster_custom_function.hpp"
+#include "cluster_params.hpp"
+#include "cec_configuration.hpp"
+#include "exceptions.hpp"
+#include "params.hpp"
+
+/**
+ * An instance of this class is a CEC model object.
+ */
+class CecModel {
+private:
+ std::vector<gmum::Cluster*> m_clusters;
+ gmum::TotalResult m_result;
+ std::vector<unsigned int> m_assignment;
+ std::vector<bool> m_inv_set;
+ std::vector<arma::mat> m_inv;
+
+ // pointer to the object created by R; it should not be freed by the user, because R's built-in GC will free it
+ CecConfiguration* m_config;
+
+ gmum::Cluster * create_cluster(gmum::ClusterParams* cluster_params, int i);
+ void find_best_cec();
+ void init_clusters(std::vector<unsigned int>& assignment);
+ void clear_clusters();
+
+public:
+ ~CecModel();
+ CecModel(CecConfiguration* cfg);
+ CecModel(CecModel& other);
+ CecModel& operator=(CecModel& other);
+
+ void loop();
+ void single_loop();
+ double entropy();
+ std::vector<unsigned int> get_assignment() const;
+ void set_assignment(std::vector<unsigned int>& assignment);
+ arma::mat get_points();
+ std::vector<arma::rowvec> centers() const;
+ std::vector<arma::mat> cov() const;
+ unsigned int iters() const;
+ std::list<unsigned int> get_nclusters() const;
+ std::list<double> get_energy_history() const;
+ double get_energy() const;
+ unsigned int predict(std::vector<double> vec);
+ const gmum::TotalResult& get_result() const;
+};
+
+#endif
diff --git a/inst/include/cec/cec_configuration.hpp b/inst/include/cec/cec_configuration.hpp
new file mode 100644
index 00000000..180ba187
--- /dev/null
+++ b/inst/include/cec/cec_configuration.hpp
@@ -0,0 +1,62 @@
+#ifndef CECCONFIGURATION_HPP
+#define CECCONFIGURATION_HPP
+
+#include "params.hpp"
+
+#ifdef RCPP_INTERFACE
+#include <RcppArmadillo.h>
+#endif
+
+#ifdef RCPP_INTERFACE
+#define GMUM_ERROR(x) Rcpp::stop(x);
+#define GMUM_WARNING(x) Rcpp::Rcerr << (x);
+#else
+#define GMUM_ERROR(x) throw x;
+#define GMUM_WARNING(x) std::cerr << x;
+#endif
+
+class CecConfiguration {
+private:
+ gmum::Params m_params;
+ // Rcpp::Function func;
+#ifdef RCPP_INTERFACE
+ void set_mix_handle_standard_cluster(Rcpp::List& list);
+ void set_mix_handle_fixed_covariance_cluster(Rcpp::List &list);
+ void set_mix_handle_fixed_spherical_cluster(Rcpp::List &list);
+ void set_mix_handle_spherical_cluster(Rcpp::List &list);
+ void set_mix_handle_diagonal_cluster(Rcpp::List& list);
+ void set_mix_handle_custom_cluster(Rcpp::List& list);
+#endif
+
+public:
+ CecConfiguration();
+
+ gmum::Params& get_params();
+ void set_params(gmum::Params params);
+
+ void set_eps(const double kill_threshold);
+
+ void set_nclusters(const unsigned int nclusters);
+ void set_log_energy(bool log_energy);
+ void set_log_cluster(bool log_nclusters);
+ void set_nstart(const unsigned int nstart);
+
+#ifdef RCPP_INTERFACE
+ void set_mix(const Rcpp::List clusters);
+ void set_centroids(const Rcpp::List centroids);
+ void set_data_set(const Rcpp::NumericMatrix proxy_dataset);
+ void set_cov(const Rcpp::NumericMatrix cov_mat_proxy);
+ void set_function(Rcpp::Function function);
+#endif
+
+ void set_method_init(const std::string init);
+ void set_method_type(const std::string type);
+
+ void set_r(const double radius);
+
+ void set_it_max(int it_max);
+ void set_algorithm(const std::string algorithm);
+ void set_seed(int seed);
+};
+
+#endif /* CECCONFIGURATION_HPP */
diff --git a/inst/include/cec/centroids_assignment.hpp b/inst/include/cec/centroids_assignment.hpp
new file mode 100644
index 00000000..9bf6d2b7
--- /dev/null
+++ b/inst/include/cec/centroids_assignment.hpp
@@ -0,0 +1,27 @@
+#ifndef CENTROIDASSIGNMENT_HPP
+#define CENTROIDASSIGNMENT_HPP
+
+#include <list>
+#include <vector>
+#include "assignment.hpp"
+
+namespace gmum {
+
+/**
+ * Points are labeled by the closest centroid
+ */
+class CentroidsAssignment: public Assignment {
+protected:
+ const std::list<std::vector<double> > &m_centroids;
+public:
+ CentroidsAssignment(const arma::mat &points, const unsigned int nclusters,
+ const std::list<std::vector<double> > &centr) :
+ Assignment::Assignment(points, nclusters), m_centroids(centr) {
+ }
+ virtual void operator()(std::vector<unsigned int> &assignment);
+};
+
+}
+
+#endif
+
diff --git a/inst/include/cec/cluster.hpp b/inst/include/cec/cluster.hpp
new file mode 100644
index 00000000..2a3be8cf
--- /dev/null
+++ b/inst/include/cec/cluster.hpp
@@ -0,0 +1,203 @@
+#ifndef CLUSTER_HPP
+#define CLUSTER_HPP
+
+// add_point and remove_point update the cluster in place. Nice! Smart state
+
+#include <cmath>
+#include <list>
+#include <vector>
+#include <armadillo>
+#include "boost/smart_ptr.hpp"
+#include "exceptions.hpp"
+
+namespace gmum {
+
+/**
+ * Cluster stores its entropy and knows how to update it
+ */
+class Cluster {
+protected:
+ unsigned int m_n;
+ int m_count;
+ arma::rowvec m_mean;
+ double m_entropy;
+
+ Cluster(int count, const arma::rowvec& mean);
+ Cluster(unsigned int id, const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ arma::rowvec initialize_mean(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+public:
+ virtual ~Cluster() { }
+ virtual void add_point(const arma::rowvec& point) = 0;
+ virtual void remove_point(const arma::rowvec& point) = 0;
+
+ virtual double entropy_after_add_point(const arma::rowvec &point) = 0;
+ virtual double entropy_after_remove_point(const arma::rowvec &point) = 0;
+ virtual Cluster* clone() = 0;
+
+ double entropy() const;
+ int size() const;
+ arma::rowvec get_mean();
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points) = 0;
+};
+
+//abstract, never created
+class ClusterUseCovMat: public Cluster {
+protected:
+ arma::mat m_cov_mat;
+ arma::mat m_cov_mat_tmp;
+
+ void initialize_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ ClusterUseCovMat(int count, const arma::rowvec &mean,
+ const arma::mat& cov_mat);
+ ClusterUseCovMat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+
+ virtual double calculate_entropy(int n, const arma::mat &cov_mat) = 0;
+public:
+ void add_point(const arma::rowvec &point);
+ void remove_point(const arma::rowvec &point);
+
+ double entropy_after_add_point(const arma::rowvec &point);
+ double entropy_after_remove_point(const arma::rowvec &point);
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ~ClusterUseCovMat() { }
+};
+
+//abstract, never created
+class ClusterOnlyTrace: public Cluster {
+protected:
+ double m_cov_mat_trace;
+ ClusterOnlyTrace(int count, const arma::rowvec & mean,
+ double cov_mat_trace);
+ ClusterOnlyTrace(unsigned int id,
+ const std::vector<unsigned int> & assignment,
+ const arma::mat & points);
+ void compute_cov_mat_trace(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual double calculate_entropy(double, int) = 0;
+public:
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+
+ void add_point(const arma::rowvec & point);
+ void remove_point(const arma::rowvec &point);
+
+ double entropy_after_add_point(const arma::rowvec &point);
+ double entropy_after_remove_point(const arma::rowvec &point);
+
+ double get_cov_mat_trace();
+
+ virtual ~ClusterOnlyTrace() { }
+
+};
+
+class ClusterStandard: public ClusterUseCovMat {
+private:
+ double calculate_entropy(int n, const arma::mat &cov_mat);
+
+public:
+ ClusterStandard(int count, const arma::rowvec &mean,
+ const arma::mat &cov_mat);
+ ClusterStandard(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ClusterStandard* clone();
+
+ virtual ~ClusterStandard() { }
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+};
+
+class ClusterFixedCovariance : public ClusterUseCovMat {
+private:
+ arma::mat m_inv_sigma;
+ double m_sigma_det;
+
+ double calculate_entropy(int n, const arma::mat &cov_mat);
+public:
+ ClusterFixedCovariance(const arma::mat& inv_sigma, double sigma_det, int count,
+ const arma::rowvec & mean, const arma::mat & cov_mat);
+ ClusterFixedCovariance(const arma::mat & sigma, unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ClusterFixedCovariance * clone();
+ virtual ~ClusterFixedCovariance() { }
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+};
+
+class ClusterSphericalFixedR : public ClusterOnlyTrace {
+private:
+ double calculate_entropy(double, int);
+ double m_r;
+public:
+ ClusterSphericalFixedR(double r, int count, const arma::rowvec & mean,
+ double cov_mat_trace);
+ ClusterSphericalFixedR(double r, unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ClusterSphericalFixedR * clone();
+
+ virtual ~ClusterSphericalFixedR() { }
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+};
+
+class ClusterSpherical: public ClusterOnlyTrace {
+private:
+ double calculate_entropy(double, int);
+public:
+ ClusterSpherical(int count, const arma::rowvec & mean,
+ double cov_mat_trace);
+ ClusterSpherical(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ClusterSpherical* clone();
+
+ virtual ~ClusterSpherical() { }
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+};
+
+class ClusterDiagonal: public ClusterUseCovMat {
+private:
+ double calculate_entropy(int n, const arma::mat &cov_mat);
+public:
+ ClusterDiagonal(int count, const arma::rowvec & mean,
+ const arma::mat & cov_mat);
+ ClusterDiagonal(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+ virtual ClusterDiagonal* clone();
+
+ virtual ~ClusterDiagonal() { }
+
+ virtual arma::mat get_cov_mat(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points);
+};
+
+}
+
+#endif
diff --git a/inst/include/cec/cluster_custom_function.hpp b/inst/include/cec/cluster_custom_function.hpp
new file mode 100644
index 00000000..acb856e0
--- /dev/null
+++ b/inst/include/cec/cluster_custom_function.hpp
@@ -0,0 +1,34 @@
+#ifndef CLUSTERCUSTOMFUNCTION_HPP
+#define CLUSTERCUSTOMFUNCTION_HPP
+
+#include <string>
+#include <vector>
+#include <armadillo>
+#include "boost/smart_ptr.hpp"
+#include "cluster.hpp"
+#include "exceptions.hpp"
+
+#ifdef RCPP_INTERFACE
+#include <RcppArmadillo.h>
+#endif
+
+#ifdef RCPP_INTERFACE
+namespace gmum {
+ class ClusterCustomFunction: public ClusterUseCovMat {
+ private:
+ boost::shared_ptr<Rcpp::Function> m_function;
+ double calculate_entropy(int n, const arma::mat &cov_mat);
+ public:
+ ClusterCustomFunction(int count, const arma::rowvec& mean,
+ const arma::mat& cov_mat, boost::shared_ptr<Rcpp::Function> function);
+ ClusterCustomFunction(unsigned int id,
+ const std::vector<unsigned int> &assignment,
+ const arma::mat &points, boost::shared_ptr<Rcpp::Function> function);
+ virtual ClusterCustomFunction* clone();
+ };
+
+}
+
+#endif
+
+#endif // CLUSTERCUSTOMFUNCTION_HPP
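[Editor's aside: a minimal sketch of how the Rcpp-only ClusterCustomFunction above is meant to be instantiated. The R function name ("det") and the callback signature are stand-in assumptions for illustration; in the package the user-supplied R function normally arrives through CecConfiguration::set_function, and this snippet needs a running R session.]

    #include <RcppArmadillo.h>
    #include "boost/smart_ptr.hpp"
    #include "cec/cluster_custom_function.hpp"

    void custom_cluster_sketch() {
        // Wrap an R function to serve as the cluster's entropy callback.
        boost::shared_ptr<Rcpp::Function> entropy_fn(new Rcpp::Function("det"));
        gmum::ClusterCustomFunction cluster(
                10,                               // count: points currently in the cluster
                arma::zeros<arma::rowvec>(2),     // mean
                arma::eye<arma::mat>(2, 2),       // covariance matrix
                entropy_fn);
    }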
diff --git a/inst/include/cec/cluster_params.hpp b/inst/include/cec/cluster_params.hpp
new file mode 100644
index 00000000..68dd4c03
--- /dev/null
+++ b/inst/include/cec/cluster_params.hpp
@@ -0,0 +1,67 @@
+#ifndef CLUSTERPARAMS_HPP
+#define CLUSTERPARAMS_HPP
+
+#include <boost/smart_ptr.hpp>
+
+#ifdef RCPP_INTERFACE
+#include <RcppArmadillo.h>
+using namespace Rcpp;
+using namespace arma;
+#else
+#include <armadillo>
+using namespace arma;
+#endif
+
+namespace gmum {
+
+ enum ClusterType {
+ kstandard, kfixed_covariance, kdiagonal, kfixed_spherical, kspherical, kno_type, kmix, kcustom
+ };
+
+/**
+ * Cluster-specific data has to be stored somewhere. Each cluster type can have its own parameter struct.
+ */
+ struct ClusterParams {
+ ClusterType type;
+ ClusterParams(ClusterType _type) :
+ type(_type) {
+ }
+ virtual ~ClusterParams() { }
+ };
+
+ struct ClusterFixedCovarianceParams : public ClusterParams {
+ bool cov_mat_set;
+ mat cov_mat;
+ ClusterFixedCovarianceParams(mat _cov_mat) :
+ ClusterParams(kfixed_covariance), cov_mat_set(true), cov_mat(_cov_mat) {
+ }
+ ClusterFixedCovarianceParams() :
+ ClusterParams(kfixed_covariance), cov_mat_set(false) {
+ }
+ virtual ~ClusterFixedCovarianceParams() { }
+ };
+
+ struct ClusterSphericalFixedRParams : public ClusterParams {
+ bool radius_set;
+ double radius;
+ ClusterSphericalFixedRParams(double _radius) :
+ ClusterParams(kfixed_spherical), radius_set(true), radius(_radius) {
+ }
+ ClusterSphericalFixedRParams() :
+ ClusterParams(kfixed_spherical), radius_set(false) {
+ }
+ virtual ~ClusterSphericalFixedRParams() { }
+ };
+
+#ifdef RCPP_INTERFACE
+ struct ClusterCustomParams: public ClusterParams {
+ boost::shared_ptr<Rcpp::Function> function;
+ ClusterCustomParams(boost::shared_ptr<Rcpp::Function> _function) : ClusterParams(kcustom), function(_function) {}
+ ClusterCustomParams() : ClusterParams(kcustom) {}
+ virtual ~ClusterCustomParams() { }
+ };
+#endif
+
+}
+
+#endif
diff --git a/inst/include/cec/const.hpp b/inst/include/cec/const.hpp
new file mode 100644
index 00000000..25a27017
--- /dev/null
+++ b/inst/include/cec/const.hpp
@@ -0,0 +1,94 @@
+#ifndef CONST_HPP
+#define CONST_HPP
+
+#include "params.hpp"
+
+namespace gmum {
+
+struct CONST {
+ static const char* dataset;
+ static const char* nclusters;
+ static const char* clusters;
+ static const char* nstart;
+ static const char* centroids_list;
+ static const char* kill_threshold;
+ static const char* itmax;
+ static const char* energy;
+ static const char* log_clusters;
+
+ struct CLUSTERS {
+ static const char* type;
+ static const char* standard;
+ static const char* fixed_covariance;
+ static const char* diagonal;
+ static const char* spherical;
+ static const char* fixed_spherical;
+ static const char* custom;
+
+ static const char* init;
+ static const char* kmeanspp;
+ static const char* random;
+ static const char* centroids;
+
+ static const char* cov_mat;
+ static const char* radius;
+ static const char* function_name;
+ };
+
+ static const unsigned int nclusters_init;
+ static const AssignmentType default_assignment;
+
+ struct ERRORS {
+ static const char* dataset_size;
+ static const char* assignment_error;
+ static const char* cov_mat_req;
+ static const char* radius_req;
+ static const char* cluster_rec_error;
+ static const char* centroids_error;
+ static const char* function_name_req;
+ };
+};
+
+const char* CONST::dataset = "x";
+const char* CONST::nclusters = "k";
+const char* CONST::clusters = "params.mix";
+const char* CONST::nstart = "control.nstart";
+const char* CONST::centroids_list = "params.centroids";
+const char* CONST::kill_threshold = "control.eps";
+const char* CONST::itmax = "control.itmax";
+const char* CONST::energy = "log.energy";
+const char* CONST::log_clusters = "log.ncluster";
+
+const char* CONST::CLUSTERS::type = "method.type";
+const char* CONST::CLUSTERS::standard = "standard";
+const char* CONST::CLUSTERS::fixed_covariance =
"fixed_covariance"; +const char* CONST::CLUSTERS::diagonal = "diagonal"; +const char* CONST::CLUSTERS::spherical = "spherical"; +const char* CONST::CLUSTERS::fixed_spherical = "fixed_spherical"; +const char* CONST::CLUSTERS::custom = "func"; + +const char* CONST::CLUSTERS::init = "method.init"; +const char* CONST::CLUSTERS::kmeanspp = "kmeans++"; +const char* CONST::CLUSTERS::random = "random"; +const char* CONST::CLUSTERS::centroids = "centroids"; + +const char* CONST::CLUSTERS::cov_mat = "params.cov"; +const char* CONST::CLUSTERS::radius = "params.r"; +const char* CONST::CLUSTERS::function_name = "params.function"; + +const unsigned int CONST::nclusters_init = 10; +const AssignmentType CONST::default_assignment = kkmeanspp; +const char* CONST::ERRORS::dataset_size = + "Size of dataset cannot be less than number of clusters!"; +const char* CONST::ERRORS::assignment_error = + "Cannot recognise assignment initialization method!"; +const char* CONST::ERRORS::cov_mat_req = "Covariance matrix is required!"; +const char* CONST::ERRORS::radius_req = "Radius is required!"; +const char* CONST::ERRORS::cluster_rec_error = "Cannot recognise cluster type!"; +const char* CONST::ERRORS::centroids_error = + "Number of centroids is different from number of clusters!"; +const char* CONST::ERRORS::function_name_req = "Function is required!"; + +} + +#endif diff --git a/inst/include/cec/exceptions.hpp b/inst/include/cec/exceptions.hpp new file mode 100644 index 00000000..34f9d31b --- /dev/null +++ b/inst/include/cec/exceptions.hpp @@ -0,0 +1,7 @@ +#ifndef EXCEPTIONS_HPP +#define EXCEPTIONS_HPP + +class NoPointsInCluster: public std::exception { +}; + +#endif diff --git a/inst/include/cec/hartigan.hpp b/inst/include/cec/hartigan.hpp new file mode 100644 index 00000000..445c45d8 --- /dev/null +++ b/inst/include/cec/hartigan.hpp @@ -0,0 +1,38 @@ +#ifndef HARTIGAN_HPP +#define HARTIGAN_HPP + + +#include +#include +#include +#include "algorithm.hpp" +#include "cluster.hpp" +#include "logger.h" +#include "utils.h" +#include + +namespace gmum { + +class Hartigan: public Algorithm { +private: + Logger m_logger; + void remove_cluster(unsigned int source, const arma::mat &points, + std::vector &assignment, + std::vector &clusters); + double calc_energy(double cross_entropy, int points_in_cluster, + int npoints); + double calc_energy_change(const Cluster& a, const Cluster &b, int npoints); +public: + Hartigan(bool log_nclusters, bool log_energy, int max_iter); + TotalResult loop(const arma::mat &points, + std::vector &assignment, double kill_threshold, + std::vector &clusters); + SingleResult single_loop(const arma::mat &points, + std::vector &assignment, double kill_threshold, + std::vector &clusters); + double entropy(boost::shared_ptr ptr_to_cluster, int npoints); +}; + +} + +#endif diff --git a/inst/include/cec/kmeanspp_assignment.hpp b/inst/include/cec/kmeanspp_assignment.hpp new file mode 100644 index 00000000..96c93d18 --- /dev/null +++ b/inst/include/cec/kmeanspp_assignment.hpp @@ -0,0 +1,45 @@ +#ifndef KMEANSPPASSIGNMENT_HPP +#define KMEANSPPASSIGNMENT_HPP + +#include +#include +#include +#include +#include "assignment.hpp" +#include "boost/random/bernoulli_distribution.hpp" +#include "boost/random/mersenne_twister.hpp" +#include "boost/random/uniform_int_distribution.hpp" + +namespace gmum { + +struct Point { + unsigned int point_number; + float distance; + Point(unsigned int n) : + point_number(n), distance(0) { + } + bool operator<(const Point p) const { + return distance >= p.distance; + } +}; + +void 
init_assign_kmeanspp(std::vector &assignment, + const arma::mat &points, unsigned int nclusters); + +void calculate_distance(const std::vector ¢ers, + std::list &selected, const arma::mat &points); + +std::list::iterator choose( + boost::random::bernoulli_distribution<> &bernoulli, + boost::random::mt19937 &gen, std::list &selected); + +class KmeansppAssignment: public Assignment { +public: + KmeansppAssignment(const arma::mat &points, const int nclusters) : + Assignment::Assignment(points, nclusters) { + } + virtual void operator()(std::vector &assignment); +}; +} + +#endif diff --git a/inst/include/cec/params.hpp b/inst/include/cec/params.hpp new file mode 100644 index 00000000..49f00d0a --- /dev/null +++ b/inst/include/cec/params.hpp @@ -0,0 +1,62 @@ +#ifndef PARAMS_HPP +#define PARAMS_HPP + +#include +#include +#include +#include "boost/smart_ptr.hpp" +#include "cluster_params.hpp" +#include "algorithm.hpp" + +namespace gmum { + +enum AssignmentType { + kkmeanspp, krandom, kcentroids +}; + +struct Params { +public: + arma::mat dataset; + double kill_threshold; + unsigned int nclusters; + bool log_nclusters; + bool log_energy; + unsigned int nstart; + int it_max; + AssignmentType assignment_type; + bool centroids_set; + std::list > centroids; + ClusterType cluster_type; + std::list > clusters; + bool cov_mat_set; + arma::mat cov_mat; + bool radius_set; + double radius; + boost::shared_ptr algorithm; + int seed; + +#ifdef RCPP_INTERFACE + boost::shared_ptr function; +#endif + + Params() : + kill_threshold(0.0001), + nclusters(0), + log_nclusters(false), + log_energy(false), + nstart(1), + it_max(25), + assignment_type(kkmeanspp), + centroids_set(false), + cluster_type(kno_type), + cov_mat_set(false), + radius_set(false), + radius(1.5) + { + seed = time(0); + } +}; + +} + +#endif diff --git a/inst/include/cec/random_assignment.hpp b/inst/include/cec/random_assignment.hpp new file mode 100644 index 00000000..5f033ac6 --- /dev/null +++ b/inst/include/cec/random_assignment.hpp @@ -0,0 +1,30 @@ +#ifndef RANDOMASSIGNMENT_HPP +#define RANDOMASSIGNMENT_HPP + +#include +#include +#include +#include "assignment.hpp" +#include "boost/random/mersenne_twister.hpp" +#include "boost/random/uniform_int_distribution.hpp" +#include "cec_configuration.hpp" + +namespace gmum { + +void init_assign_random(std::vector &assignment, + const arma::mat &points, unsigned int nclusters); + +class RandomAssignment: public Assignment { +public: + RandomAssignment(const arma::mat &points, const int nclusters) : + Assignment::Assignment(points, nclusters) { + } + + virtual void operator()(std::vector &assignment); + +}; + +} + +#endif + diff --git a/inst/include/gng/globals.h b/inst/include/gng/globals.h new file mode 100644 index 00000000..fe33e041 --- /dev/null +++ b/inst/include/gng/globals.h @@ -0,0 +1,16 @@ +/* + * File: GNGGlobals.h + * Author: Stanislaw "kudkudak" Jastrzebski + * + * Created on August 19, 2012, 2:30 PM + */ + +#ifndef GNGGLOBALS_H +#define GNGGLOBALS_H + +#define GNG_MAX_DIM 1000 + +const double EPS = 1E-9; + +#endif /* GNGGLOBALS_H */ + diff --git a/inst/include/gng/gng.h b/inst/include/gng/gng.h new file mode 100644 index 00000000..13f024ce --- /dev/null +++ b/inst/include/gng/gng.h @@ -0,0 +1,24 @@ +/* + * File: GNGInclude.h + * Author: staszek + * + * Created on 12 sierpieĹ„ 2012, 11:56 + */ +#ifndef GNGINCLUDE_H +#define GNGINCLUDE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "utils/logger.h" + + + +#endif /* GNGINCLUDE_H */ + diff 
--git a/inst/include/gng/gng_algorithm.h b/inst/include/gng/gng_algorithm.h new file mode 100644 index 00000000..e556162b --- /dev/null +++ b/inst/include/gng/gng_algorithm.h @@ -0,0 +1,202 @@ +/* + * File: GNGAlgorithm.h + * Author: Stanislaw "kudkudak" Jastrzebski + * + * Created on 11 sierpieĹ„ 2012, 10:02 + */ + +#ifndef GNGALGORITHM_H +#define GNGALGORITHM_H + +#include +#include +#include +#include +#include +#include + +#include "utils/threading.h" +#include "utils/circular_buffer.h" + +#include +#include + +using namespace std; + +//TODO: shorten constructor + +namespace gmum { + +/** + * The main class of the implementation dealing with computations. + * It should be agnostic of inner working (memory management etc.) of the graph and database. + * Also should not be concerned with locking logic. + * + * + * @note Many algorithm results are retrievable only when it is paused for performance reasons + */ +class GNGAlgorithm { +public: + /**Construct main algorithm object + * @param db GNGDataset object + * @param boundingbox_origin Starting point for reference system + * @param boundingbox_axis Axis lengths for reference system + * @param l Starting box size for uniform grid. Advised to be set to axis[0]/4 (TODO: move to the end of parameters list) + * @param max_nodes Maximum number of nodes + * @param max_age Maximum age of edge + * @param alpha See original paper(TODO: add description) + * @param betha See original paper (TODO: add description) + * @param lambda Every lambda new vertex is added + * @param eps_v See original paper(TODO: add description) + * @param eps_n See original paper (TODO: add description) + * @param dim Dimensionality + */ + GNGAlgorithm(GNGGraph * g, GNGDataset * db, double * boundingbox_origin, + double * boundingbox_axis, double l, int max_nodes = 1000, + int max_age = 200, double alpha = 0.95, double betha = 0.9995, + double lambda = 200, double eps_w = 0.05, double eps_n = 0.0006, + int dim = 3, bool uniformgrid_optimization = true, + bool lazyheap_optimization = true, unsigned int utility_option = + GNGConfiguration::UtilityOff, double utility_k = -1, + int max_iter = -1, int seed=777, + boost::shared_ptr logger = boost::shared_ptr()); + + /** Run main loop of the algorithm*/ + void runAlgorithm(); + + ///Retrieve closest node's gng_index to the example + int predict(const std::vector &); + + //Updates clustering field on the dataset kept in memory + void updateClustering(); + + void run(bool synchronized=true); + void pause(bool synchronized=true); + bool isRunning(); + void terminate(bool synchronized=true); + + unsigned getErrorIndex() const; + void setMaxNodes(int value); + int getIteration() const; + double getMeanError(); + vector > getMeanErrorStatistics(); + + //Retrieve clustering result. 
+ //@note pauses algorithm as many + const vector & get_clustering(); + + virtual ~GNGAlgorithm(); +public: + //TODO: don't use list in UniformGrid + typedef std::list Node; + + int calculated_errors; //for convergence checking + circular_buffer > m_mean_error; //error of the network + int m_lambda; //lambda parameter + double m_eps_w, m_eps_n; //epsilon of the winner and of the neighbour + int m_max_age, m_max_nodes, m_iteration; + + bool m_toggle_uniformgrid, m_toggle_lazyheap; + + int max_iter; + + double m_utility_k; + int m_utility_option; + + double m_alpha, m_betha; + double * m_betha_powers; + int m_betha_powers_to_n_length; + double * m_betha_powers_to_n; + int m_betha_powers_size; + double m_accumulated_error; + + int dim; + boost::shared_ptr m_logger; + + + double m_density_threshold, m_grow_rate; + + /** Constants used by lazy heap implementation */ + int s, c; + + GNGGraph & m_g; + GNGDataset * g_db; + UniformGrid, Node, int> * ug; + GNGLazyErrorHeap errorHeap; + + enum GngStatus { + GNG_PREPARING, GNG_RUNNING, GNG_PAUSED, GNG_TERMINATED + }; + + GngStatus m_gng_status, m_gng_status_request; + bool running; + + enum UtilityOptions { + None, BasicUtility + }; + + gmum::recursive_mutex status_change_mutex; + //rewrite to the same locking logic as get_clustering + gmum::fast_mutex m_statistics_mutex; + gmum::gmum_condition status_change_condition; + + vector clustering_result; +private: + //Main algorithm methods + + mt19937 mt_rand; + + //@return error and closest node index + std::pair adapt(const double * ex, const double * extra); + std::pair _getNearestNeurons(const double *ex); + + void randomInit(); + void addNewNode(); + + GNGNode ** LargestErrorNodesLazy(); + GNGNode ** LargestErrorNodes(); + + bool stoppingCriterion(); + + //Utility functions + + double calculateAccumulatedError(); + void resetUniformGrid(double * orig, double *axis, double l); + void resizeUniformGrid(); + + //sets clustering assignment of given node + void set_clustering(unsigned int ex, unsigned int node_idx); + + void increaseErrorNew(GNGNode * node, double error); + void fixErrorNew(GNGNode * node); + double getMaximumError() const; + void decreaseAllErrorsNew(); + void decreaseErrorNew(GNGNode * node); + void setErrorNew(GNGNode * node, double error); + void increaseError(GNGNode * node, double error); + void decreaseAllErrors(); + void decreaseError(GNGNode * node); + void setError(GNGNode * node, double error); + + // Note: this code is not optimal and is inserted only for research purposes + double getUtility(int i); + void setUtility(int i, double u); + void utilityCriterionCheck(); + void decreaseAllUtility(); + + GngStatus gng_status() { + return m_gng_status; + } +}; + +/**Design hack for passing distance function dist(index, position)*/ +struct GNGGraphAccessHack { + static GNGGraph * pool; + static double dist(int index, double *position) { + return pool->get_euclidean_dist((*pool)[index].position, position); + } +}; + +} + +#endif diff --git a/inst/include/gng/gng_configuration.h b/inst/include/gng/gng_configuration.h new file mode 100644 index 00000000..b6287f6a --- /dev/null +++ b/inst/include/gng/gng_configuration.h @@ -0,0 +1,154 @@ +/* + * File: GNGConfiguration.h + * Author: staszek + * + * Created on October 17, 2013, 8:11 PM + */ + +#ifndef GNGCONFIGURATION_H +#define GNGCONFIGURATION_H + +#ifdef RCPP_INTERFACE +#include +#include +using namespace Rcpp; +#endif + +#include "utils/utils.h" +#include "gng_graph.h" +#include + +///Configuration of GNG algorithm/server +struct 
GNGConfiguration {
+ enum GraphNodeStorage {
+ NoneGraphNodeStorage, SharedMemory, RAMMemory
+ } graph_storage;
+
+ enum DatasetType {
+ NoneDatasetTypeinit, DatasetSeq, DatasetSampling, DatasetSamplingProb
+ };
+
+ enum ExperimentalUtility {
+ UtilityOff, UtilityBasicOn
+ };
+
+ int seed;
+
+ /**Maximum number of nodes*/
+ int max_nodes; //=1000;
+ /**Uniform grid optimization*/
+ bool uniformgrid_optimization; //=true,lazyheap=true;
+ /**Lazy heap optimization*/
+ bool lazyheap_optimization;
+
+ /**Dimensionality of examples*/
+ unsigned dim;
+
+ /**Bounding box specification*/
+ std::vector<double> orig;
+ std::vector<double> axis;
+ /**Max edge age*/
+ int max_age; //=200;
+ /**Alpha coefficient*/
+ double alpha; //=0.95;
+ /**Beta coefficient*/
+ double beta; //=0.9995;
+ /**Lambda coefficient*/
+ double lambda; //=200;
+ /**Epsilon w. How strongly the winning node is moved*/
+ double eps_w; //=0.05;
+ /**Memory bound*/
+ int graph_memory_bound;
+ /**Epsilon n*/
+ double eps_n; //=0.0006;
+
+ ///Maximum number of iterations
+ int max_iter; //=-1;
+
+ int verbosity;
+
+ /**Pseudodistance function used (might be non metric)*/
+ int distance_function;
+
+ /**Type of used database, unsigned int for compatibility with Rcpp**/
+ unsigned int datasetType;
+
+ /**Initial reserve memory for nodes */
+ int starting_nodes;
+
+ ///Utility constant
+ double experimental_utility_k;
+
+ ///Utility option. Currently only the basic utility is supported
+ int experimental_utility_option;
+
+ GNGConfiguration() {
+ seed = -1; // -1 is equivalent to null
+
+ verbosity = 1;
+
+ max_iter = -1;
+
+ starting_nodes = 100;
+
+ experimental_utility_option = (int) UtilityOff;
+ experimental_utility_k = 1.5;
+
+ graph_storage = RAMMemory;
+
+ dim = 3;
+ setBoundingBox(0, 1);
+
+ datasetType = DatasetSampling;
+ max_nodes = 1000;
+ uniformgrid_optimization = false;
+ graph_memory_bound = 200000 * sizeof(double);
+
+ lazyheap_optimization = false;
+ max_age = 200;
+ alpha = 0.95;
+ beta = 0.9995;
+ lambda = 200;
+ eps_w = 0.05;
+ eps_n = 0.0006;
+
+ distance_function = gmum::GNGGraph::Euclidean;
+
+ }
+
+ void deserialize(std::istream & in);
+
+ void serialize(std::ostream & out) const;
+
+ //This is a simplification - we assume a square box
+ void setBoundingBox(double min, double max) {
+ orig = vector<double>();
+ axis = vector<double>();
+ for (size_t i = 0; i < dim; ++i) {
+ orig.push_back(min);
+ axis.push_back(max - min);
+ }
+ }
+
+ /// Get default configuration of GNG Server
+ static GNGConfiguration getDefaultConfiguration() {
+ GNGConfiguration default_configuration;
+ return default_configuration;
+ }
+
+ friend ostream & operator<<(ostream & out, const GNGConfiguration & conf) {
+ conf.serialize(out);
+ return out;
+ }
+
+ #ifdef RCPP_INTERFACE
+ void show() const {
+ serialize(Rcpp::Rcerr);
+ }
+ #endif
+
+ /// Validate server configuration
+ bool check_correctness();
+};
+#endif /* GNGCONFIGURATION_H */
+
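[Editor's aside: a short sketch of the construction sequence implied by the header above; note that setBoundingBox fills orig/axis per dimension, so dim should be set first. The include path and all concrete values are illustrative assumptions, not recommendations.]

    #include <gng/gng_configuration.h>

    int main() {
        // Start from the defaults and override selectively.
        GNGConfiguration config = GNGConfiguration::getDefaultConfiguration();
        config.dim = 4;                   // dimensionality of the examples
        config.setBoundingBox(-1.0, 1.0); // must follow dim: fills orig/axis per dimension
        config.max_nodes = 500;
        config.uniformgrid_optimization = false;
        return config.check_correctness() ? 0 : 1; // validate before starting the server
    }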
search). Therefore it is locking on addExample and drawExample + * + * @note Takes ownership of the memory inserted. Copy memory before inserting if you + * want to use this memory afterwards!! + */ +class GNGDataset { +public: + + virtual int getDataDim() const=0; + + virtual int getGNGDim() const =0; + + ///Returns index of example drawn + virtual unsigned int drawExample()=0; + + ///Retrieves pointer to position + virtual const double * getPosition(unsigned int) const=0; + + ///Retrieves pointer to vertex data, with unsigned int as descriptor of meta + virtual const double * getExtraData(unsigned int) const=0; + + ///Inserts examples to the dataset + virtual void insertExamples(const double *, const double*, const double *, + unsigned int count)=0; + + virtual void removeExample(unsigned int)=0; + + virtual int size() const=0; + + virtual ~GNGDataset() { + } + + virtual void lock() = 0; + virtual void unlock() = 0; +}; + +///Storage :< GNGDatabaseStorage +template +class GNGDatasetSimple: public GNGDataset { + +protected: + const unsigned int dim_; + + gmum::recursive_mutex * mutex_; + + vector storage_; + vector storage_extra_; + vector storage_probability_; + + bool store_extra_; + unsigned int current_example_; + + boost::shared_ptr logger_; + + mt19937 mt_rand; +public: + + enum AccessMethod { + Sequential, Sampling, SamplingProbability + } access_method_; + + /* + * @param prob_location If prob location is -1 (by default) means there + * is no probability data in meta data. + */ + GNGDatasetSimple(gmum::recursive_mutex *mutex, unsigned int dim, + bool store_extra = false, AccessMethod access_method = Sequential, + int seed = 777, + boost::shared_ptr logger = boost::shared_ptr()) : + mutex_(mutex), store_extra_(store_extra), dim_(dim), access_method_( + access_method), current_example_(0), logger_(logger), mt_rand(seed) { + + } + + void lock() { + mutex_->lock(); + } + void unlock() { + mutex_->unlock(); + } + + ~GNGDatasetSimple() { + DBG_PTR(logger_,10, "GNGDatasetSimple:: destroying"); + } + + ///Retrieves pointer to position + const T * getPosition(unsigned int i) const { + return &storage_[i * dim_]; + } + + const T * getExtraData(unsigned int i) const { + if (!store_extra_) + return 0; + return &storage_extra_[i]; + } + + + + unsigned int drawExample() { + + if (access_method_ != Sequential) { + if (access_method_ == Sampling) { + return RANDOM_INT(mt_rand, 0, size() - 1); + } else { + const double * ex; + unsigned int index; + + do { + index = RANDOM_INT(mt_rand, 0, size() - 1); + ex = getPosition(index); + } while (storage_probability_[index] < __double_rnd(0, 1.0)); + + return index; + } + } else { + unsigned int tmp = current_example_; + + current_example_ = (current_example_ + 1) % size(); + + return tmp; + + } + } + + void insertExamples(const double * positions, const double *extra, + const double *probability, unsigned int count) { + + if (storage_.capacity() < storage_.size() + count * dim_) { + DBG_PTR(logger_,10, "Resizing storage_"); + storage_.reserve(storage_.size() + count * dim_); + DBG_PTR(logger_,10, "Resized storage_"); + } + + storage_.insert(storage_.end(), positions, positions + count * dim_); + + if (store_extra_) { + if (storage_extra_.capacity() < storage_extra_.size() + count) { + DBG_PTR(logger_,10, "Resizing store_extra_"); + storage_extra_.reserve(storage_extra_.size() + count); + } + + if (!extra) { + for (size_t i = 0; i < count; ++i) + storage_extra_.push_back(0); + } else { + storage_extra_.insert(storage_extra_.end(), extra, + extra + count); 
+ } + } + if (access_method_ == SamplingProbability) { + ASSERT(probability); + + if (storage_probability_.capacity() + < storage_probability_.size() + count) { + DBG_PTR( logger_,10, "Resizing storage_probability_"); + storage_probability_.reserve( + storage_probability_.size() + count); + } + + storage_probability_.insert(storage_probability_.end(), probability, + probability + count); + } + } + + void removeExample(unsigned int index) { + throw BasicException("Not implemented"); + } + + int size() const { + return storage_.size() / dim_; + } + + virtual int getDataDim() const { + return dim_; + } + + virtual int getGNGDim() const { + return dim_; + } +private: +}; +} + +#endif +/* GNGEXAMPLEMANAGER_H */ diff --git a/inst/include/gng/gng_defines.h b/inst/include/gng/gng_defines.h new file mode 100644 index 00000000..dfecd863 --- /dev/null +++ b/inst/include/gng/gng_defines.h @@ -0,0 +1,48 @@ +/* + * File: GNGDefines.h + * Author: staszek + * + * Created on August 15, 2012, 6:37 AM + */ + +#ifndef GNGDEFINES_H +#define GNGDEFINES_H + +#include +#include + +namespace gmum { + +/** Carries message about message to look for in shared memory used for interprocess communication + * @Note : We are using shared memory for interprocess communication, not message queue, because storing objects in shared memory is quite convenient + */ +struct SHGNGMessage { + enum State { + NoState, Waiting, Processed + }; + enum Type { + NoType, AddExamples, /**Interprocess:vector of GNGExample*/ + Request /**String, @note: for subset of request we can create more efficient communication protocol*/ + }; + + int state; + int type; +}; + +/** Filled struct to pass add example SHGNGMessage + * @note All post message memory managment is held by server (if there is need of course) + * + * count - number of examples + * dim - number of dimensions for example + * pointer_reference_name - pointer name to look up in shared memory + */ +struct SHGNGMessageAddExamples { + int count; + int dim; + std::string pointer_reference_name; +}; + +} + +#endif /* GNGDEFINES_H */ + diff --git a/inst/include/gng/gng_graph.h b/inst/include/gng/gng_graph.h new file mode 100644 index 00000000..b6d5f751 --- /dev/null +++ b/inst/include/gng/gng_graph.h @@ -0,0 +1,168 @@ +/* + * File: SHGraph.h + * Author: staszek + * + * Created on 11 sierpieĹ„ 2012, 09:07 + */ + +#ifndef GNGGraph_H +#define GNGGraph_H + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +namespace gmum { + +class GNGGraph { +public: + + enum GNGDistanceFunction { + Euclidean, Cosine + }; + + virtual ~ GNGGraph() { + } + + /** Lock from unsafe operations + * @note It ensures that operations won't fail (in worst case block) + * Mostly used for blocking regrowing + */ + virtual void lock() { + //Do nothing by default + } + + /** Unlock for unsafe operations */ + virtual void unlock() { + //Do nothing by default + } + + /** This is specific for GNG Graph - e + * each node is assigned index. 
It fetches maximum node index + */ + virtual unsigned int get_maximum_index() const = 0; + + /* + * @return True if exists node in the graph + */ + virtual bool existsNode(unsigned int) const = 0; + + virtual int get_dim() const = 0; + + virtual GNGNode & operator[](int i) = 0; + + virtual unsigned int get_number_nodes() const = 0; + + virtual double get_dist(int a, int b) = 0; + + virtual double get_euclidean_dist(const double * pos_1, + const double * pos_2) const= 0; + + virtual double get_dist(const double *pos_a, const double *pos_b) const = 0; + + /* Initialize node with position attribute */ + virtual int newNode(const double *position) = 0; + + virtual bool deleteNode(int x) = 0; + + virtual bool isEdge(int a, int b) const = 0; + + virtual GNGNode::EdgeIterator removeUDEdge(int a, int b) = 0; + + virtual void addUDEdge(int a, int b) = 0; + + virtual void addDEdge(int a, int b) = 0; + + virtual std::string reportPool() { + return ""; + } + + virtual void load(std::istream & in) = 0; + virtual void serialize(std::ostream & out) = 0; + +}; + +/* + * Node: implements GNGNode interface + * Edge: implements GNGEdge interface + * Mutex: implements lock and unlock interface + */ +template +class RAMGNGGraph: public GNGGraph { +public: + // Indicates next free vertex + std::vector next_free; + int first_free; + GNGDistanceFunction dist_fnc; + + typedef typename Node::EdgeIterator EdgeIterator; + + RAMGNGGraph(Mutex * mutex, unsigned int dim, int initial_pool_size, + GNGDistanceFunction dist_fnc = Euclidean, + boost::shared_ptr logger = boost::shared_ptr()); + + Node & operator[](int i); + + int newNode(const double *position); + bool deleteNode(int x); + EdgeIterator removeUDEdge(int a, int b); + void addUDEdge(int a, int b); + void addDEdge(int a, int b); + + std::string reportPool(); + + virtual int get_dim() const; + double get_dist(int a, int b); + double get_euclidean_dist(const double *pos_a, const double *pos_b) const; + double get_dist(const double *pos_a, const double *pos_b) const; + virtual unsigned int get_maximum_index() const; + virtual bool existsNode(unsigned i) const; + bool isEdge(int a, int b) const; + const double *getPosition(int nr) const; + unsigned int get_number_nodes() const; + + virtual void lock(); + virtual void unlock(); + + /* + * format is [N] [gng_dim] N* [0/1 + vertex] N*[ [l] l*[gng_idx]] + */ + void serialize(std::ostream & output); + void load(std::istream & input); + + ~RAMGNGGraph(); +private: + Mutex * mutex; + + std::vector g; + std::vector occupied; + std::vector positions; //as continuous array for speed/caching purposes, could be vector + + int maximum_index; + unsigned int nodes, gng_dim; + + boost::shared_ptr m_logger; + +private: + void resizeGraph(); +}; + +#include + +static std::string writeToGraphML(GNGGraph &g, string filename = ""); + +} +#endif diff --git a/inst/include/gng/gng_graph.hpp b/inst/include/gng/gng_graph.hpp new file mode 100644 index 00000000..f909a63f --- /dev/null +++ b/inst/include/gng/gng_graph.hpp @@ -0,0 +1,525 @@ +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; +using namespace gmum; + +template +RAMGNGGraph::RAMGNGGraph(Mutex *mutex, unsigned int dim, + int initial_pool_size, GNGDistanceFunction dist_fnc, + boost::shared_ptr logger) : + maximum_index(-1), mutex(mutex), gng_dim(dim), first_free(-1), nodes(0), dist_fnc( + dist_fnc), m_logger(logger) { + + positions.resize(initial_pool_size * gng_dim); + 
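+	// Editorial sketch (not library code): the node pool uses a free list
+	// threaded through next_free, initialized just below. For a pool of
+	// size 4 the invariant after construction is:
+	//
+	//   next_free = {1, 2, 3, -1};   first_free = 0;
+	//
+	// so newNode() pops first_free and deleteNode() pushes the index back.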
+ //Initialize graph data structures + g.resize(initial_pool_size); + + for (int i = 0; i < initial_pool_size; ++i) + g[i].reserve(gng_dim); + + occupied.resize(initial_pool_size); + + for (int i = 0; i < initial_pool_size; ++i) + occupied[i] = false; + next_free.resize(initial_pool_size); + + for (int i = 0; i < initial_pool_size - 1; ++i) + next_free[i] = i + 1; + next_free[initial_pool_size - 1] = -1; + first_free = 0; + +} + +template +RAMGNGGraph::~RAMGNGGraph() { + for (int i = 0; i < g.size(); ++i) { + if (occupied[i]) { + BOOST_FOREACH(GNGEdge *edg, g[i]) + delete edg; + } + } + +} + +template +unsigned int RAMGNGGraph::get_maximum_index() const { + return this->maximum_index; +} + +template +bool RAMGNGGraph::existsNode(unsigned i) const { + return i < nodes && occupied[i]; +} + +template +bool RAMGNGGraph::isEdge(int a, int b) const { + + BOOST_FOREACH(GNGEdge *edg, g[a]) { + if ((edg)->nr == b) + return true; + } + return false; +} + +template +const double *RAMGNGGraph::getPosition(int nr) const { + return g[nr].position; +} + +template +unsigned int RAMGNGGraph::get_number_nodes() const { + return this->nodes; +} + +template +Node & +RAMGNGGraph::operator[](int i) { + return g[i]; +} + +template +double RAMGNGGraph::get_dist(int a, int b) { + return get_dist(g[a].position, g[b].position); +} + +template +double RAMGNGGraph::get_euclidean_dist(const double *pos_a, + const double *pos_b) const { + double distance = 0; + for (int i = 0; i < this->gng_dim; ++i) + distance += (pos_a[i] - pos_b[i]) * (pos_a[i] - pos_b[i]); + + return distance; +} + +template +double RAMGNGGraph::get_dist(const double *pos_a, + const double *pos_b) const { + ASSERT(dist_fnc == Euclidean || dist_fnc == Cosine); + if (dist_fnc == Euclidean) { + double distance = 0; + for (size_t i = 0; i < this->gng_dim; ++i) + distance += (pos_a[i] - pos_b[i]) * (pos_a[i] - pos_b[i]); + + return distance; + } else { + double norm_1 = 0, norm_2 = 0, distance = 0; + + for (size_t i = 0; i < this->gng_dim; ++i) { + norm_1 += (pos_a[i]) * (pos_a[i]); + norm_2 += (pos_b[i]) * (pos_b[i]); + distance += pos_a[i] * pos_b[i]; + } + + norm_1 = sqrt(norm_1); + norm_2 = sqrt(norm_2); + return 1.0 - distance / (norm_1 * norm_2); + } +} + +template +int RAMGNGGraph::newNode(const double *position) { + if (first_free == -1) { + DBG_PTR(m_logger, 10, "RAMGNGGraph::newNode() growing pool"); + this->resizeGraph(); + + } + + int createdNode = first_free; //taki sam jak w g_node_pool + + maximum_index = createdNode > maximum_index ? 
createdNode : maximum_index; + + ASSERT(g[createdNode].size() == 0); + + // Initialize node + g[createdNode].position = &positions[createdNode * gng_dim]; + occupied[createdNode] = true; + g[createdNode].nr = createdNode; + g[createdNode].edgesCount = 0; + g[createdNode].utility = 0.0; + g[createdNode]._position_owner = false; + g[createdNode].dim = gng_dim; + g[createdNode].extra_data = 0.0; + + first_free = next_free[createdNode]; + + ++this->nodes; + memcpy(&(g[createdNode].position[0]), position, + sizeof(double) * (this->gng_dim)); //param + + //TODO: this should be tracked by GNGAlgorithm + g[createdNode].error = 0.0; + g[createdNode].error_cycle = 0; + + return createdNode; + +} + +template +bool RAMGNGGraph::deleteNode(int x) { + + this->lock(); + if (existsNode(x)) { + //TODO: add automatic erasing edges + ASSERT(g[x].size() == 0); + + --nodes; + if (maximum_index == x) + maximum_index = maximum_index - 1; + + occupied[x] = false; + next_free[x] = first_free; + first_free = x; + this->unlock(); + return true; + + } + + this->unlock(); + return false; + +} + +template +typename RAMGNGGraph::EdgeIterator RAMGNGGraph::removeUDEdge(int a, int b) { + + this->lock(); + + for (typename Node::iterator edg = g[a].begin(); edg != g[a].end(); ++edg) { + if ((*edg)->nr == b) { + Edge *ptr_rev = (Edge *) ((**edg).rev); + Edge *ptr = (Edge *) (&(**edg)); + + g[b].erase(find(g[b].begin(), g[b].end(), (*edg)->rev)); + + edg = g[a].erase(edg); + + delete ptr; + delete ptr_rev; + + g[a].edgesCount--; + g[b].edgesCount--; + this->unlock(); + return edg; + } + } + + this->unlock(); + DBG_PTR(m_logger, 10, "ExtGraphNodeManager()::removeEdge Not found edge!"); + return g[a].end(); + +} + +template +void RAMGNGGraph::addUDEdge(int a, int b) { + + this->lock(); + + if (a == b) + throw "Added loop to the graph"; + + g[a].push_back(new Edge(b)); + g[b].push_back(new Edge(a)); + + g[a].back()->rev = g[b].back(); + g[b].back()->rev = g[a].back(); + + g[a].edgesCount++; + g[b].edgesCount++; + this->unlock(); + +} + +template +void RAMGNGGraph::addDEdge(int a, int b) { + throw BasicException("Not implemented"); +} + +template +std::string RAMGNGGraph::reportPool() { + std::stringstream ss; + for (unsigned int i = 0; i < g.size(); ++i) { + string tmp = ""; + if (occupied[i]) { + tmp = tmp + to_str(g[i]) + ":"; + BOOST_FOREACH(GNGEdge *it2, g[i]) { + tmp += to_str((it2)->nr) + "[" + to_str((((it2)->rev))->nr) + + "],"; + } + tmp = tmp + "\n"; + } + ss << tmp; + } + return ss.str(); +} + +template +int RAMGNGGraph::get_dim() const { + return gng_dim; +} + +template +void RAMGNGGraph::lock() { + mutex->lock(); +} + +template +void RAMGNGGraph::unlock() { + mutex->unlock(); +} + +template +void RAMGNGGraph::serialize(std::ostream &output) { + this->lock(); + + vector S; + S.reserve(10000); + + //Header + S.push_back((double) (g.size())); + S.push_back((double) (maximum_index + 1)); + S.push_back((double) gng_dim); + S.push_back((double) first_free); + S.push_back((double) nodes); + + DBG_PTR(m_logger, 7, "GNGGraph::Serializing nodes"); + //Nodes + for (int i = 0; i < g.size(); ++i) { + if (existsNode(i)) { + S.push_back((double) 1); + vector serialized_node = g[i].dumpVertexData(); + + std::copy(serialized_node.begin(), serialized_node.end(), + std::back_inserter(S)); + } else { + S.push_back((double) 0); + } + } + + DBG_PTR(m_logger, 7, "GNGGraph::Serializing edges"); + + //Edges + for (int i = 0; i < g.size(); ++i) { + if (existsNode(i)) { + vector serialized_node = g[i].dumpEdges(); + 
std::copy(serialized_node.begin(), serialized_node.end(),
+					std::back_inserter(S));
+		} else {
+			S.push_back((double) 0);
+		}
+	}
+	DBG_PTR(m_logger, 7, "GNGGraph::Serializing nextFree");
+	//NextFree
+	for (int i = 0; i < g.size(); ++i) {
+		S.push_back((double) next_free[i]);
+	}
+	DBG_PTR(m_logger, 7, "GNGGraph::Serialize::writing out");
+
+	_write_bin_vect(output, S);
+
+	this->unlock();
+}
+
+template<class Node, class Edge, class Mutex>
+void RAMGNGGraph<Node, Edge, Mutex>::load(std::istream &input) {
+	this->lock();
+
+	DBG_PTR(m_logger, 7, "GNGGraph:: loading ");
+
+	vector<double> S = _load_bin_vector(input);
+	vector<double>::iterator itr = S.begin();
+	//Header
+	unsigned int buffer_size = (int) *itr;
+	maximum_index = (int) *(++itr) - 1;
+	gng_dim = (int) *(++itr);
+	first_free = (int) *(++itr);
+	nodes = (int) *(++itr);
+
+	DBG_PTR(m_logger, 5, "Read in " + to_str(buffer_size) + " sized graph with "
+			+ " max_index=" + to_str(maximum_index) + " gng_dim=" + to_str(gng_dim) + " "
+			+ "first_free=" + to_str(first_free) + " nodes=" + to_str(nodes)
+	);
+
+	positions.clear();
+	g.clear();
+	next_free.clear();
+	occupied.clear();
+
+	occupied.resize(buffer_size);
+	g.resize(buffer_size);
+	next_free.resize(buffer_size);
+	positions.resize((buffer_size + 1) * gng_dim);
+
+	for (size_t i = 0; i < buffer_size; ++i) {
+		occupied[i] = false;
+		g[i].reserve(gng_dim + 2);
+	}
+
+	//Deserialize nodes
+	for (size_t i = 0; i < g.size(); ++i) {
+		int tmp = (int) *(++itr);
+		occupied[i] = (bool) tmp;
+		if (occupied[i])
+			g[i].loadVertexData(itr, gng_dim, &positions[i * gng_dim]);
+	}
+
+	//Deserialize edges
+	for (size_t i = 0; i < g.size(); ++i) {
+		int edges_length = (int) *(++itr);
+
+		for (int j = 0; j < edges_length; ++j) {
+			int gng_endpoint_index = (int) *(++itr);
+			if (gng_endpoint_index > i)
+				this->addUDEdge(i, gng_endpoint_index);
+		}
+	}
+
+	//Deserialize nextFree
+	for (size_t i = 0; i < g.size(); ++i) {
+		next_free[i] = (int) *(++itr);
+	}
+
+	this->unlock();
+}
+
+template<class Node, class Edge, class Mutex>
+void RAMGNGGraph<Node, Edge, Mutex>::resizeGraph() {
+	DBG_PTR(m_logger, 5, "GNGGraph::resizing");
+	unsigned int previous_size = g.size();
+
+	//Grow positions pool
+	positions.resize(2 * previous_size * gng_dim);
+
+	g.resize(2 * previous_size);
+
+	//Reassign memory pointers (positions.resize may have reallocated)
+	for (size_t i = 0; i < previous_size; ++i) {
+		g[i].position = &positions[i * gng_dim];
+	}
+
+	occupied.resize(2 * previous_size);
+	for (size_t i = previous_size; i < 2 * previous_size; ++i) {
+		occupied[i] = false;
+	}
+
+	next_free.resize(2 * previous_size);
+	for (size_t i = previous_size - 1; i < 2 * previous_size - 1; ++i) {
+		next_free[i] = i + 1;
+	}
+	next_free[g.size() - 1] = -1;
+	first_free = previous_size;
+
+	DBG_PTR(m_logger, 5, "GNGGraph::resizing done");
+	DBG_PTR(m_logger, 5, to_str(first_free));
+	DBG_PTR(m_logger, 5, to_str(next_free[previous_size]));
+}
+
+static void writeToGraphML(GNGGraph &g, std::ostream &out) {
+
+	g.lock();
+
+	out << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
+	out
+			<< "<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" "
+					"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" "
+					"xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns "
+					"http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd\">\n";
+	out
+			<< "<key id=\"v_error\" for=\"node\" attr.name=\"error\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_extra_data\" for=\"node\" attr.name=\"extra_data\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_gng_index\" for=\"node\" attr.name=\"gng_index\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_utility\" for=\"node\" attr.name=\"utility\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_v0\" for=\"node\" attr.name=\"v0\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_v1\" for=\"node\" attr.name=\"v1\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"v_v2\" for=\"node\" attr.name=\"v2\" attr.type=\"double\"/>\n";
+	out
+			<< "<key id=\"e_dist\" for=\"edge\" attr.name=\"dist\" attr.type=\"double\"/>\n";
+	out
+			<< "<graph id=\"G\" edgedefault=\"undirected\">\n";
+
+	std::map<int, int> gng_index_to_graph_index;
+
+	unsigned int k = 0;
+	for (int i = 0; i <= g.get_maximum_index(); ++i) {
+
+		if (g.existsNode(i)) {
+			gng_index_to_graph_index[g[i].nr] = k; //TODO: document the exact role of g[i].nr vs the pool index i
+
+			out << "<node id=\"n" << k++ << "\">\n";
+			out << "<data key=\"v_error\">" << g[i].error << "</data>\n";
+			out << "<data key=\"v_extra_data\">" << g[i].extra_data << "</data>\n";
+			out << "<data key=\"v_gng_index\">" << g[i].nr << "</data>\n";
+			out << "<data key=\"v_utility\">" << g[i].utility << "</data>\n";
+			out << "<data key=\"v_v0\">" << g[i].position[0] << "</data>\n";
+			out << "<data key=\"v_v1\">" << g[i].position[1] << "</data>\n";
+			out << "<data key=\"v_v2\">" << g[i].position[2] << "</data>\n";
+			out << "</node>\n";
+		}
+	}
+
+	unsigned int l = 0;
+
+	for (unsigned int i = 0; i <= g.get_maximum_index(); ++i)
+		if (g.existsNode(i)) {
+			FOREACH(GNGEdge *edg, g[i]) {
+				if (g[i].nr > (edg)->nr) { //write each undirected edge only once
+					out << "<edge id=\"e" << l++ << "\" source=\"n"
+							<< gng_index_to_graph_index[(edg)->nr]
+							<< "\" target=\"n"
+							<< gng_index_to_graph_index[g[i].nr] << "\">\n";
+					out << "<data key=\"e_dist\">" << g.get_dist(i, (edg)->nr)
+							<< "</data>";
+					out << "</edge>\n";
+				}
+			}
+
+		}
+
+	out << "</graph>\n</graphml>\n";
+	g.unlock();
+
+}
+
+static std::string writeToGraphML(GNGGraph &g, string filename) {
+
+	if (filename == "") {
+		std::stringstream ss;
+		writeToGraphML(g, ss);
+		return ss.str();
+	} else {
+		ofstream myfile(filename.c_str());
+		writeToGraphML(g, myfile);
+		myfile.close();
+		return "";
+	}
+
+}
+
diff --git a/inst/include/gng/gng_lazy_error_heap.h b/inst/include/gng/gng_lazy_error_heap.h new file mode 100644 index 00000000..9dce0854 --- /dev/null +++ b/inst/include/gng/gng_lazy_error_heap.h @@ -0,0 +1,128 @@
+/*
+ * File: GNGLazyErrorHeap.h
+ * Author: staszek
+ *
+ * Created on August 21, 2012, 4:00 AM
+ */
+
+#ifndef GNGLAZYERRORHEAP_H
+#define GNGLAZYERRORHEAP_H
+
+#include <gng/heap.h>
+#include <list>
+namespace gmum {
+struct ErrorNode {
+	double error;
+	int i;
+	ErrorNode(const ErrorNode & orig) {
+		error = orig.error;
+		i = orig.i;
+	}
+	ErrorNode(double error, int i) :
+			error(error), i(i) {
+	}
+	ErrorNode() {
+	}
+	bool operator>(const ErrorNode& rhs) {
+		return error > rhs.error;
+	}
+	bool operator<(const ErrorNode& rhs) {
+		return error < rhs.error;
+	}
+	bool operator>=(const ErrorNode& rhs) {
+		return error >= rhs.error;
+	}
+	bool operator<=(const ErrorNode& rhs) {
+		return error <= rhs.error;
+	}
+	friend std::ostream & operator<<(std::ostream & out,
+			const ErrorNode & rhs) {
+		out << "(" << rhs.error << "," << rhs.i << ")";
+		return out;
+	}
+};
+
+class GNGLazyErrorHeap: protected Heap<ErrorNode> {
+	typedef Heap<ErrorNode> super;
+
+protected:
+	std::vector<void*> m_buffer; //handle to the heap entry of node nr
+	std::vector<bool> m_isonlist;
+
+	int m_buffer_size;
+	void checkBufferSize() {
+		if (m_buffer_size >= SIZE(m_buffer)) {
+			m_buffer.resize(3 * m_buffer_size);
+			m_isonlist.resize(3 * m_buffer_size);
+		} //new entries are value-initialized by default (false/0, as T())
+	}
+
+	std::list<int> L; //list of nodes to be inserted on lazy top operation
+public:
+	std::list<int> & getLazyList() {
+		return L;
+	}
+
+	GNGLazyErrorHeap() :
+			m_buffer_size(0), super() {
+	}
+
+	void insertLazy(int nr) {
+		m_buffer_size = std::max(m_buffer_size, nr + 1);
+		checkBufferSize();
+
+		if (!m_isonlist[nr]) {
+			L.push_back(nr);
+			m_isonlist[nr] = true;
+		}
+	}
+
+	void updateLazy(int nr) {
+		m_buffer_size = std::max(m_buffer_size, nr + 1);
+		checkBufferSize();
+
+		if (m_buffer[nr])
+			super::remove(m_buffer[nr]);
+		m_buffer[nr] = 0;
+
+		if (!m_isonlist[nr]) {
+			L.push_back(nr);
+			m_isonlist[nr] = true;
+		}
+	}
+
+	void update(int nr, double new_error) {
+		m_buffer_size = std::max(m_buffer_size, nr + 1);
+		checkBufferSize();
+
+		if (m_buffer[nr])
+			super::remove(m_buffer[nr]);
+		m_buffer[nr] = super::insert(ErrorNode(new_error, nr));
+	}
+
+	void insert(int nr, double error) {
+		m_buffer_size = std::max(m_buffer_size, nr + 1);
+		checkBufferSize();
+
+		if (m_buffer[nr] == 0)
+			m_buffer[nr] = reinterpret_cast<void*>(super::insert(
+					ErrorNode(error, nr)));
+		m_isonlist[nr] = false;
+	}
+
+	ErrorNode extractMax() {
+
+		ErrorNode max =
super::extractMax(); + + m_buffer[max.i] = 0; //mark that it is removed from the heap + return max; + } + + using super::getSize; + +}; +} +#endif /* GNGLAZYERRORHEAP_H */ + diff --git a/inst/include/gng/gng_node.h b/inst/include/gng/gng_node.h new file mode 100644 index 00000000..3361900e --- /dev/null +++ b/inst/include/gng/gng_node.h @@ -0,0 +1,143 @@ +/* + * File: SHGraphDefs.h + * Author: staszek + * + * Created on 11 sierpieĹ„ 2012, 08:18 + */ + +#ifndef SHGRAPHDEFS_H +#define SHGRAPHDEFS_H + +#include +#include +#include +#include +#include +#include +#include + +/** + * Basic interface for Edge in GNGGraph. + */ +class GNGEdge { +public: + GNGEdge * rev; + int nr; + double error; + int age; + + GNGEdge() : + error(0), age(0) { + } + + GNGEdge(int nr) : + nr(nr), error(0.0), age(0) { + } +}; + +//Dump format [N nodes] [dim] (doubles) N * node->dump() + +///Warning dev note: GNGNode fields have to be properly copied/initialized form +///both GNGAlgorithm and 2 functions in GNGNode +///I should improve it shortly +class GNGNode: public std::vector { +public: + //TODO: change to GNGEdge, no need for dandling pointers + typedef std::vector::iterator EdgeIterator; + + double utility; //0 + int error_cycle; //1 + double error; //2 + int edgesCount; //3 + int nr; //4 + bool _position_owner; //5 + unsigned int dim; //6 + double extra_data; //7 - extra data that is voted among vertices when adapting + double * position; //8... 8+dim-1 + + // Construct empty GNGNode + GNGNode() { + //prevent memory corruption + _position_owner = false; + position = 0; + } + + ~GNGNode() { + if (_position_owner) { + delete[] position; + } + } + + double dist(GNGNode * gnode) const { //dist doesnt account for param + using namespace std; + double ret = 0; + for (size_t i = 0; i < dim; ++i) + ret += (this->position[i] - gnode->position[i]) + * (this->position[i] - gnode->position[i]); + return sqrt(ret); + } + + friend std::ostream& operator<<(std::ostream& out, const GNGNode & node) { + out << node.nr << "(" << node.error << ")("; + for (size_t i = 0; i < node.dim; ++i) { + out << node.position[i] << ","; + } + out << ")"; + + return out; + } + + vector dumpEdges() { + vector dump(1 + this->size(), 0.0); + dump[0] = this->size(); + for (size_t i = 0; i < this->size(); ++i) + dump[i + 1] = (*this)[i]->nr; + return dump; + } + + ///Dumps to vector of numbers + vector dumpVertexData() { + vector dump(8 + dim, 0.0); + dump[0] = utility; + dump[1] = error_cycle; + dump[2] = error; + dump[3] = edgesCount; + dump[4] = nr; + dump[5] = (int) _position_owner; + dump[6] = dim; + dump[7] = extra_data; + for (size_t i = 0; i < dim; ++i) { + dump[i + 8] = position[i]; + } + return dump; + } + //Loads from vector + void loadVertexData(vector & x, double * position_ptr) { + + utility = x[0]; + error_cycle = x[1]; + error = x[2]; + edgesCount = (int) x[3]; + nr = (int) x[4]; + _position_owner = x[5]; + dim = x[6]; + extra_data = x[7]; + position = position_ptr; + for (size_t i = 0; i < dim; ++i) { + position[i] = x[i + 8]; + } + } + void loadVertexData(vector::iterator & itr, int gng_dim, + double * position_ptr) { + vector dump; + dump.reserve(8 + gng_dim); + + std::copy(itr + 1, itr + 9 + gng_dim, std::back_inserter(dump)); + std::advance(itr, (8 + gng_dim)); + + this->loadVertexData(dump, position_ptr); + } + +}; + +#endif /* SHGRAPHDEFS_H */ diff --git a/inst/include/gng/gng_server.h b/inst/include/gng/gng_server.h new file mode 100644 index 00000000..088977ae --- /dev/null +++ b/inst/include/gng/gng_server.h @@ -0,0 +1,158 
@@ +/* + * File: GNGServer.h + * Author: staszek + * + * Created on October 17, 2013, 8:12 PM + */ +#ifndef GNGSERVER_H +#define GNGSERVER_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "utils/threading.h" +#include "utils/utils.h" + + +#ifdef RCPP_INTERFACE +#include +using namespace Rcpp; +using namespace arma; +#endif + +static int gng_server_count=0; + +using namespace gmum; + +/** Holds together all logic and objects.*/ +class GNGServer { +public: + boost::shared_ptr m_logger; + + /**Construct GNGServer using configuration*/ + GNGServer(GNGConfiguration configuration, std::istream * input_graph); + + GNGServer(std::string filename); + + void run(); + + void pause(); + + bool hasStarted() const; + + void terminate(); + + void setVerbosity(int verbosity) { + this->m_logger->verbosity = verbosity; + } + + double nodeDistance(int id1, int id2) const; + + void save(std::string filename); + + unsigned int getCurrentIteration() const; + + ///Exports GNG state to file + void exportToGraphML(std::string filename); + + ///Insert examples + void insertExamples(double * positions, double * extra, + double * probability, unsigned int count, unsigned int dim); + + + unsigned getDatasetSize() const; + unsigned getGNGErrorIndex() const; + bool isRunning() const; + vector getMeanErrorStatistics(); + unsigned int getNumberNodes() const; + double getMeanError(); + GNGConfiguration getConfiguration(); + GNGAlgorithm & getAlgorithm(); + GNGGraph & getGraph(); + GNGDataset & getDatabase(); + + ~GNGServer(); + +#ifdef RCPP_INTERFACE + //Constructor needed for RCPPInterface + GNGServer(GNGConfiguration * configuration); + + ///Moderately slow function returning node descriptors + Rcpp::List getNode(int index); + + int Rpredict(Rcpp::NumericVector & r_ex); + + Rcpp::NumericVector RgetClustering(); + + Rcpp::NumericVector RgetErrorStatistics(); + + void RinsertExamples(Rcpp::NumericMatrix & r_points); + + void RinsertLabeledExamples(Rcpp::NumericMatrix & r_points, + Rcpp::NumericVector r_extra); + + //This is tricky - used only by convertToIGraph in R, because + //it might happen that we delete nodes and have bigger index of the last node + //than actual nodes (especially in the case of utility version of GNG) + unsigned int _getLastNodeIndex() const; + + ///Calls updateClustering on the GNGAlgorithm object + void _updateClustering(); +#endif + +private: + int m_index; + + bool m_running_thread_created; + + gmum::gmum_thread * algorithm_thread; + + /** Mutex used for synchronization of graph access*/ + gmum::recursive_mutex grow_mutex; + /** Mutex used for synchronization of graph access*/ + gmum::recursive_mutex database_mutex; + /** Mutex used for synchronization of graph access*/ + gmum::recursive_mutex stat_mutex; + + GNGConfiguration current_configuration; + + std::auto_ptr gngAlgorithm; + std::auto_ptr gngGraph; + std::auto_ptr gngDataset; + + //Called from constructors + void init(GNGConfiguration configuration, std::istream * input_graph = 0); + +private: + /** Run GNG Server - runs in separate thread and returns control + * @note Runs one extra threads for communication. + */ + static void _run(void * server) { + // This shouldn't be necessary but R behaves strangely in this matter. 
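+		// Editorial sketch (assumption, not library code): run() is expected
+		// to dispatch this trampoline on the worker thread, roughly:
+		//
+		//   algorithm_thread = new gmum::gmum_thread(&GNGServer::_run,
+		//                                            (void*) this);
+		//
+		// The void* signature exists because gmum_thread takes a C-style
+		// entry point; the first step below is to recover the typed server.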
+ GNGServer * gng_server = (GNGServer*) server; + try { + DBG_PTR(gng_server->m_logger,10, "GNGServer::run::proceeding to algorithm"); + gng_server->getAlgorithm().runAlgorithm(); + } catch (std::exception & e) { + DBG_PTR(gng_server->m_logger,10, e.what()); + } + } + +public: + //legacy code + static GNGServer * constructTestServer(GNGConfiguration config) { + return new GNGServer(config, 0 /*input_graph*/); + } +}; + +#endif +/* GNGSERVER_H */ diff --git a/inst/include/gng/heap.h b/inst/include/gng/heap.h new file mode 100644 index 00000000..19d598fb --- /dev/null +++ b/inst/include/gng/heap.h @@ -0,0 +1,206 @@ +/* + * File: Heap.h + * Author: staszek + * + * Created on August 21, 2012, 3:40 AM + */ + +#ifndef HEAP_H +#define HEAP_H + +#include "utils/utils.h" +#include + +namespace gmum { + +template +class Heap { +public: + + struct HeapNode { + int key; + T val; + HeapNode() { + } + HeapNode(const T& val, int key) : + key(key), val(val) { + } + bool operator>(const HeapNode & rhs) { + return val > rhs.val; + } + bool operator<(const HeapNode & rhs) { + return val < rhs.val; + } + bool operator>=(const HeapNode & rhs) { + return val >= rhs.val; + } + bool operator<=(const HeapNode & rhs) { + return val <= rhs.val; + } + friend std::ostream & operator<<(std::ostream & out, + const HeapNode & rhs) { + out << rhs.val; + return out; + } + }; + std::vector m_heap; + int m_size; + + void checkHeapSize() { + if (m_size == SIZE(m_heap)) + m_heap.resize(2 * (int) m_heap.size()); + } + + int parent(int i) { + return (i - 1) / 2; //shift + } + int leftChild(int i) { + return 2 * i + 1; //shift + } + int rightChild(int i) { + return 2 * i + 2; //shift + } + + bool operator=(const Heap & rhs) { + return true; + } +public: + + bool isEmpty() const { + return (int) m_heap.size() == 0; + } + + int getSize() const { + return m_size; + } + + Heap(int size = 100) : m_size(0) { + m_heap.resize(size); + } + + ~Heap() { + for (int i = 0; i < m_size; ++i) { + delete m_heap[i]; + } + } + + + int moveUp(int i, HeapNode * node) { + if (m_size == 0) + return -1; + + while (i > 0 && *node > *(m_heap[parent(i)])) { + m_heap[i] = m_heap[parent(i)]; + + m_heap[i]->key = i; + + i = parent(i); + } + + m_heap[i] = node; + m_heap[i]->key = i; + return i; + } + int moveDown(int i) { + if (m_size == 0) + return -1; + + HeapNode * tmp; + int j = 0; + + if (m_size > 1) + while (i <= parent(m_size - 1)) { + + j = leftChild(i); + if (j != (m_size - 1) && *(m_heap[j + 1]) > *(m_heap[j])) + ++j; //right child + + if (*(m_heap[j]) > *(m_heap[i])) { + tmp = m_heap[i]; + m_heap[i] = m_heap[j]; + m_heap[j] = tmp; + m_heap[i]->key = i; + m_heap[j]->key = j; + i = j; + } else + break; + } + + m_heap[i]->key = i; + + return i; + } + + //zwraca wskaznik do struktury w heap - uzywane do remove w O(1) co jest konieczne tutaj + void* insert(const T& val) { + checkHeapSize(); + HeapNode * node = new HeapNode(val, SIZE(m_heap)); + m_heap[m_size++] = node; + + int i = moveUp(m_size - 1, node); + + return reinterpret_cast(m_heap[i] = node); + } + + bool check(int i) const { + if (m_size == 0) + return false; + + bool r = true, l = true; + + if (leftChild(i) < m_size) { + l = *m_heap[leftChild(i)] <= *m_heap[i] && check(leftChild(i)); + } + if (rightChild(i) < m_size) { + r = *m_heap[rightChild(i)] <= *m_heap[i] && check(rightChild(i)); + } + return l && r; + } + + void remove(void * ptr) { + HeapNode * node = reinterpret_cast(ptr); + //delete node; + + int i = node->key; + + //memory leak + + m_heap[i] = m_heap[m_size - 1]; + m_heap[i]->key = 
i; + --m_size; + + node = m_heap[i]; + + if (i != 0 && *node > *m_heap[parent(i)]) + moveUp(i, node); + else { + if ((leftChild(i) < m_size && *m_heap[leftChild(i)] > *node) + || (rightChild(i) < m_size && *m_heap[rightChild(i)] > *node)) { + moveDown(i); + } + } + + delete reinterpret_cast(ptr); + } + + T extractMax() { + if (m_size == 0) + throw "Zero sized Heap max extraction?"; + + HeapNode * tmp = m_heap[0]; + T extracted = tmp->val; + + delete tmp; + + m_heap[0] = m_heap[m_size - 1]; + --m_size; + + moveDown(0); + + return extracted; + } + +}; +} +#endif /* HEAP_H */ + diff --git a/inst/include/gng/uniform_grid.h b/inst/include/gng/uniform_grid.h new file mode 100644 index 00000000..3a75b80f --- /dev/null +++ b/inst/include/gng/uniform_grid.h @@ -0,0 +1,177 @@ +/* + * File: UniformGrid.h + * Author: staszek + * + * Created on August 19, 2012, 7:02 AM + */ + +#ifndef UNIFORM_GRID_H +#define UNIFORM_GRID_H + +#include +#include +#include +#include +#include +#include + +using namespace std; + +namespace gmum { +/** + * Is not thread safe !! Very important (speed purposes) + */ +template +class UniformGrid { + typedef ListContainer Node; + typedef VectorContainer NodeArray; + +public: + UniformGrid(double * origin, int *dim, int gng_dim, double m_grow_factor = + 1.5, double m_density_threshold = 2.0, + double m_density_threshold_min = 0.4, + boost::shared_ptr logger = boost::shared_ptr()); + + UniformGrid(double * origin, double *axis, double l, int gng_dim, + double m_grow_factor = 1.5, double m_density_threshold = 2.0, + double m_density_threshold_min = 0.4, + boost::shared_ptr logger = boost::shared_ptr()); + + vector findNearest(const double *p, int n = 2); + + // Calculates new size given growth factor + long int calculate_new_size(double *origin, double *axis, double l); + + void purge(double *origin, int* dim, double l); + + void purge(double *origin, double *axis, double l); + + void new_l(double l); + + int insert(double *p, T x); + + bool remove(double *p); + + // Calculates if growing the grid will payoff + bool check_grow(); + + double getCellLength() const { + return m_l; + } + + double getDensity() const { + return m_density; + } + + int getCapacity() const { + return SIZE(m_grid); + } + + int getNodes() const { + return m_nodes; + } + + int getDimension(int axis) const { + return m_dim[axis]; + } + + void setDistFunction(double (*dist_fnc)(T, double*)) { + m_dist_fnc = dist_fnc; + } + + ~UniformGrid(); + +private: + // Check if search was successful + bool searchSuccessful(double min_dist = -1); + + void scanCell(int k, double* query); + + void crawl(int current_dim, int fixed_dim); + + bool scanCorners(); + + T find(double *p); + +private: + /*** Maximum size pass which UG won't grow */ + static const int MAX_SIZE = 1000000; + + NodeArray m_grid; + + //global variables for search query + int s_found_cells[4]; + double s_found_cells_dist[4]; + int s_search_query; + + int *s_center; + int s_radius; + int *s_pos; + double *s_query; + + double (*m_dist_fnc)(T, double*); //distance function; + + double m_l; + double m_h; + + double m_density; + double m_density_threshold; + double m_density_threshold_min; + double m_grow_factor; + + int neighbourhood_size; //=3^d + + int gng_dim; + + //TODO: erase GNG_MAX_DIM + double m_axis[GNG_MAX_DIM]; + + int m_nodes; + + boost::shared_ptr m_logger; + + int* m_dim; //number of uniform cells along certain axis + + int* m_tmp_int; //avoid alloc on heap all the time in calccell <- one thread! 
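+	// Editorial sketch (not library code): cells are addressed in mixed
+	// radix. For a 2-d grid with m_dim = {4, 3}, cell coordinates (x, y)
+	// map to the flat index x + 4*y, which is exactly what getIndex()
+	// below computes by accumulating the running multiplier mul.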
+ + vector m_neigh; + + //TODO: erase GNG_MAX_DIM + double m_origin[GNG_MAX_DIM]; + + int getIndex(int *p) { + int value = p[0]; + double mul = m_dim[0]; + for (int i = 1; i < this->gng_dim; ++i) { + value += p[i] * mul; + mul *= m_dim[i]; + } + + return value; + } + int * calculateCell(const double *p) { + //int * m_tmp_int = new int[this->gng_dim]; + for (int i = 0; i < this->gng_dim; ++i) { + m_tmp_int[i] = (int) ((p[i] - m_origin[i]) / m_l); + } + return &m_tmp_int[0]; + } + + bool _inside(int x) { + return (x) >= 0 && (x) < m_grid.size(); + } + + bool isZero(double x) { + return x > -EPS && x < EPS; + } + + unsigned int calculate_cell_side(double axis, double l, int old_dim) { + return max(old_dim + 1, (int) ((axis) / (l)) + 1); + } +}; + +#include + +} + +#endif diff --git a/inst/include/gng/uniform_grid.hpp b/inst/include/gng/uniform_grid.hpp new file mode 100644 index 00000000..e2eac167 --- /dev/null +++ b/inst/include/gng/uniform_grid.hpp @@ -0,0 +1,404 @@ +/* + * File: UniformGrid.hpp + * Author: staszek + * + * Created on September 3, 2012, 9:03 AM + */ +using namespace gmum; + + +template +void UniformGrid::new_l(double l) { + double *org = new double[this->gng_dim]; + double *axis = new double[this->gng_dim]; + memcpy(org, m_origin, this->gng_dim * sizeof(double)); + memcpy(axis, m_axis, this->gng_dim * sizeof(double)); + purge(org, axis, l); + delete[] org; + delete[] axis; +} +template +void UniformGrid::purge(double *origin, + double *axis_array, double l) { + int * dim = new int[this->gng_dim]; + memcpy(m_axis, axis_array, sizeof(double) * this->gng_dim); + + // If length has not been specified, it means that we want to have minimum number of cells possible + // Which amounts to picking the smallest axis + if (l == -1.0) { + double l_min = axis_array[0]; + REP(i, this->gng_dim) + l_min = min(l_min, axis_array[0]); + l = l_min * 1.01; // For numerical stability + } + + REP(i, this->gng_dim) + { + dim[i] = calculate_cell_side(axis_array[i], l, m_dim[i]); + } + + purge(origin, dim, l); + delete[] dim; +} +template +void UniformGrid::purge(double *origin, + int* dim, double l) { + m_l = l; + memcpy(&m_dim[0], dim, sizeof(int) * this->gng_dim); + memcpy(&m_origin, origin, sizeof(double) * this->gng_dim); + + m_density = 0.0; + m_density_threshold = 0.1; + m_grow_factor = 1.5; + m_nodes = 0; + + m_grid.clear(); + + int new_size = 1; + + REP(i, this->gng_dim) + { + new_size *= m_dim[i]; + REPORT(new_size); + REPORT(m_dim[i]); + } + + m_grid.resize(new_size); +} + +template +void UniformGrid::scanCell(int k, + double* query) { + double dist_candidate; + + // Not implemented search routine for > 2 + if (s_search_query != 2) + throw "Not implemented for >2 search query.."; + + BOOST_FOREACH(int node, m_grid[k]) + { + + dist_candidate = m_dist_fnc(node, query); + + // + if (node != s_found_cells[1] + && (s_found_cells_dist[0] < 0 + || dist_candidate <= s_found_cells_dist[0])) { + + //Overwrite worst + s_found_cells_dist[0] = dist_candidate; + s_found_cells[0] = node; + + //Swap it to the right place + for (int j = 1; j < s_search_query; ++j) { + if (s_found_cells_dist[j] < 0 + || dist_candidate <= s_found_cells_dist[j]) { + std::swap(s_found_cells[j], s_found_cells[j - 1]); + std::swap(s_found_cells_dist[j], s_found_cells_dist[j - 1]); + + } + } + + } + + } +} + +template +void UniformGrid::crawl(int current_dim, + int fixed_dim) { + + if (current_dim == fixed_dim) { + if (current_dim >= this->gng_dim - 1) { + scanCell(getIndex(s_pos), s_query); + } //skip current dimension + else + 
crawl(current_dim + 1, fixed_dim); + } else { + int from, to; + + //skip corners + if (current_dim < fixed_dim) { + from = std::max(s_center[current_dim] - s_radius + 1, 0); + to = std::min(s_center[current_dim] + s_radius - 1, + m_dim[current_dim] - 1); + } else { + from = std::max(s_center[current_dim] - s_radius, 0); + to = std::min(s_center[current_dim] + s_radius, + m_dim[current_dim] - 1); + } + + for (int i = from; i <= to; ++i) { + + s_pos[current_dim] = i; + + if (current_dim == this->gng_dim - 1) { + scanCell(getIndex(s_pos), s_query); + } else { + crawl(current_dim + 1, fixed_dim); + } + } + + } + + return; +} + +/** Uses cached values (s_pos, s_center, s_radius) to search cells */ +template +bool UniformGrid::scanCorners() { + + int left, right; + + bool scanned = false; + + memcpy(s_pos, s_center, sizeof(int) * this->gng_dim); + + REP(i, this->gng_dim) + { + left = s_center[i] - s_radius; + right = s_center[i] + s_radius; + + if (s_center[i] - s_radius >= 0) { + s_pos[i] = left; + crawl(0, i); + scanned = true; + } + if (s_center[i] + s_radius < m_dim[i]) { + s_pos[i] = right; + crawl(0, i); + scanned = true; + } + + s_pos[i] = s_center[i]; + } + + return scanned; +} + +//mutates pos! + +template +int UniformGrid::insert(double *p, T x) { + //memcpy(&m_copy[0],p,sizeof(double)*this->gng_dim); + int * index = calculateCell(p); + int k = getIndex(index); + + if (!_inside(k)) + return 0; + + m_grid[k].push_back(x); + m_nodes++; + m_density = (double) m_nodes / (double) SIZE(m_grid); + return k; +} + +template +std::vector UniformGrid::findNearest( + const double *p, int n) { //returns indexes (values whatever) + s_search_query = n; + + + int * center = calculateCell(p); + + REP(i, n) + { + s_found_cells[i] = -1; + s_found_cells_dist[i] = -1; + } + memcpy(s_center, center, sizeof(int) * this->gng_dim); + + memcpy(s_query, p, sizeof(double) * this->gng_dim); + + int center_id = getIndex(center); + + int size = SIZE(m_grid); + + //Check if inside uniform grid + if (!_inside(center_id)) { + vector returned_value; + returned_value.push_back(-1); + returned_value.push_back(-1); + return returned_value; + + } + double border, border_squared, tmp; + s_found_cells_dist[0] = s_found_cells_dist[1] = -1; + s_found_cells[0] = s_found_cells[1] = -1; + + //init of the search + scanCell(center_id, s_query); + + if (s_found_cells[0] == s_found_cells[1] && s_found_cells[0] != -1) { + DBG_PTR(m_logger, 10, "UniformGrid:: Found two same nodes in one cell!!"); + } + + for (int i = 0; i < this->gng_dim; ++i) { + tmp = abs((p[i] - m_origin[i] - center[i] * m_l)) + < abs((p[i] - m_origin[i] - (center[i] + 1) * m_l)) ? 
+ abs((p[i] - m_origin[i] - center[i] * m_l)) : + abs((p[i] - m_origin[i] - (center[i] + 1) * m_l)); + if (border > tmp || i == 0) + border = tmp; + } + + border_squared = border * border; + + s_radius = 0; + + DBG_PTR(m_logger, 2, + "UniformGird:: scanned straightforward cell proceeding to crawling"); + + //No more cells to search + if (m_grid.size() == 1) { + std::vector ret(2); + ret[1] = s_found_cells[0]; + ret[0] = s_found_cells[1]; + + return ret; + } + + while (!searchSuccessful(border_squared)) { + ++s_radius; + border += m_l; + border_squared = border * border; + + if (!scanCorners()) + break; //false if no cells to check (no cell checked) + } + + std::vector ret(2); + + //Reversed array - see scanCell + ret[1] = s_found_cells[0]; + ret[0] = s_found_cells[1]; + + return ret; +} + +template +bool UniformGrid::remove(double *p) { //returns indexes (values whatever) + int * cell = calculateCell(p); + int index = getIndex(cell); + + for(typename Node::iterator node = m_grid[index].begin(); + node != m_grid[index].end(); ++node) + { + if (isZero(m_dist_fnc(*node, p))) { + m_grid[index].erase(node); + --m_nodes; + return true; + } + } + return false; +} + +template +UniformGrid::~UniformGrid() { + delete[] s_center; + delete[] s_pos; + delete[] s_query; + delete[] m_dim; + delete[] m_tmp_int; +} + +//TODO: extract constructor base +template + UniformGrid::UniformGrid(double * origin, int *dim, int gng_dim, double m_grow_factor, double m_density_threshold, + double m_density_threshold_min, + boost::shared_ptr logger) : + m_dist_fnc(0), gng_dim(gng_dim), m_density_threshold( + m_density_threshold), m_density_threshold_min( + m_density_threshold_min), m_grow_factor(m_grow_factor), m_logger( + logger) { + neighbourhood_size = int(pow(3.0, (double) gng_dim)); + + this->m_density_threshold = m_density_threshold; + this->m_density_threshold_min = m_density_threshold_min; + this->m_grow_factor = m_grow_factor; + + s_center = new int[this->gng_dim]; + + s_pos = new int[this->gng_dim]; + s_query = new double[this->gng_dim]; + m_dim = new int[this->gng_dim]; //number of uniform cells along certain axis + m_tmp_int = new int[this->gng_dim]; //avoid alloc on heap all the time in calccell <- one thread! + + //Zero + for (int i = 0; i < this->gng_dim; ++i) + s_center[i] = s_pos[i] = s_query[i] = m_dim[i] = m_tmp_int[i] = 0; + + + purge(origin, dim, -1.0); +} + +template +bool UniformGrid::searchSuccessful(double min_dist) { + REP(i, s_search_query) + { + if (s_found_cells[i] == -1 || s_found_cells_dist[i] > min_dist) + return false; + } + return true; + } + +template +UniformGrid::UniformGrid(double * origin, double *axis, double l, int gng_dim, + double m_grow_factor, double m_density_threshold, + double m_density_threshold_min, + boost::shared_ptr logger) : + m_dist_fnc(0), gng_dim(gng_dim), m_density_threshold( + m_density_threshold), m_density_threshold_min( + m_density_threshold_min), m_grow_factor(m_grow_factor), m_logger( + logger) { + neighbourhood_size = int(pow(3.0, (double) gng_dim)); + + s_center = new int[this->gng_dim]; + + s_pos = new int[this->gng_dim]; + s_query = new double[this->gng_dim]; + m_dim = new int[this->gng_dim]; //number of uniform cells along certain axis + m_tmp_int = new int[this->gng_dim]; //avoid alloc on heap all the time in calccell <- one thread! 
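+	// Editorial note (sketch, not library code): purge() below derives the
+	// grid resolution via calculate_cell_side(), which yields at least
+	// (int)(axis / l) + 1 cells per axis; e.g. an axis of length 1.0 with
+	// cell side l = 0.3 produces 4 cells along that axis.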
+ + //Zero + for (int i = 0; i < this->gng_dim; ++i) + s_center[i] = s_pos[i] = s_query[i] = m_dim[i] = m_tmp_int[i] = 0; + + purge(origin, axis, l); + +} + + +template +long int UniformGrid::calculate_new_size(double *origin, double *axis, double l) { + unsigned long int result = 1; + + REP(i, this->gng_dim) + { + result *= calculate_cell_side(axis[i], l, m_dim[i]); + if (result > UniformGrid::MAX_SIZE) + return -1; + } + return result; +} + + +template +bool UniformGrid::check_grow() { + unsigned long int result = this->calculate_new_size(m_origin, m_axis, + m_l / m_grow_factor); + + if (result == -1) + return false; + + double avg_density = m_nodes / (double) m_grid.size(); + double new_avg_density = m_nodes + / (double) (this->calculate_new_size(m_origin, m_axis, + m_l / m_grow_factor)); + + return avg_density > m_density_threshold + && new_avg_density > m_density_threshold_min; +} + + + diff --git a/inst/include/svm/libsvm_runner.h b/inst/include/svm/libsvm_runner.h new file mode 100644 index 00000000..bd53042a --- /dev/null +++ b/inst/include/svm/libsvm_runner.h @@ -0,0 +1,54 @@ +/* + * LibSVMRunner.h + * + * Created on: Apr 7, 2014 + * Author: sacherus + */ +#ifndef LIB_SVM_RUNNER_H +#define LIB_SVM_RUNNER_H + +#include +#include +#include +#include +#include "svm.h" +#include "svm_handler.h" +#include "svm_basic.h" + +class LibSVMRunner: public SVMHandler { +public: + LibSVMRunner(); + virtual ~LibSVMRunner(); + bool save_model_to_config(SVMConfiguration&, svm_parameter*, svm_problem&); + svm_model* load_model_from_config(SVMConfiguration&, svm_parameter*); + /*public interface + * + */ + void processRequest(SVMConfiguration&); + bool canHandle(SVMConfiguration&); + +private: + svm_node** armatlib(arma::mat); // arma to libsvm standard + double* vectlib(arma::vec); //arma vector to lisvm + void arma_prediction(SVMConfiguration&); + svm_parameter* configuration_to_problem(SVMConfiguration&); + svm_node** SparseToSVMNode( + arma::vec& x, int r, arma::Col& rowindex, arma::Col& colindex + ); + svm_node** ArmaSpMatToSvmNode(arma::sp_mat &); + void parseCommandLine(SVMConfiguration&, svm_parameter&); + /// Parse command line args from `config` and store them in `param` + void libraryParseCommandLine( + SVMConfiguration& config, + svm_parameter& param, + int argc, + char** argv, + char *input_file_name, + char *model_file_name + ); + + struct svm_model *model; + struct svm_problem prob; +}; + +#endif /* LIBSVMRUNNER_H_ */ diff --git a/inst/include/svm/svm.h b/inst/include/svm/svm.h new file mode 100644 index 00000000..5ab1b8b5 --- /dev/null +++ b/inst/include/svm/svm.h @@ -0,0 +1,107 @@ +#ifndef LIBSVM_H +#define LIBSVM_H + +#define LIBSVM_VERSION 318 + +#include "utils/logger.h" + +#ifdef __cplusplus +extern "C" { +#endif + +extern int libsvm_version; + +struct svm_node +{ + int index; + double value; +}; + +struct svm_problem +{ + int l; + double *y; + struct svm_node **x; +}; + +enum { C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR }; /* svm_type */ +enum { LINEAR, POLY, RBF, SIGMOID, PRECOMPUTED }; /* kernel_type */ + +struct svm_parameter +{ + int svm_type; + int kernel_type; + int degree; /* for poly */ + double gamma; /* for poly/rbf/sigmoid */ + double coef0; /* for poly/sigmoid */ + + /* these are for training only */ + double cache_size; /* in MB */ + double eps; /* stopping criteria */ + double C; /* for C_SVC, EPSILON_SVR and NU_SVR */ + int nr_weight; /* for C_SVC */ + int *weight_label; /* for C_SVC */ + double* weight; /* for C_SVC */ + double nu; /* for NU_SVC, ONE_CLASS, 
and NU_SVR */ + double p; /* for EPSILON_SVR */ + int shrinking; /* use the shrinking heuristics */ + int probability; /* do probability estimates */ + int max_iter; // gmum.r modification + +}; + +// +// svm_model +// +struct svm_model +{ + struct svm_parameter param; /* parameter */ + int nr_class; /* number of classes, = 2 in regression/one class svm */ + int l; /* total #SV */ + struct svm_node **SV; /* SVs (SV[l]) */ + double **sv_coef; /* coefficients for SVs in decision functions (sv_coef[k-1][l]) */ + double *rho; /* constants in decision functions (rho[k*(k-1)/2]) */ + double *probA; /* pariwise probability information */ + double *probB; + int *sv_indices; /* sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set */ + + /* for classification only */ + + int *label; /* label of each class (label[k]) */ + int *nSV; /* number of SVs for each class (nSV[k]) */ + /* nSV[0] + nSV[1] + ... + nSV[k-1] = l */ + /* XXX */ + int free_sv; /* 1 if svm_model is created by svm_load_model*/ + /* 0 if svm_model is created by svm_train */ + int iter; // gmum.r modification -> sacherus +}; + +struct svm_model *svm_train(const struct svm_problem *prob, const struct svm_parameter *param, Logger &log); +void svm_cross_validation(const struct svm_problem *prob, const struct svm_parameter *param, int nr_fold, double *target, Logger &log); + +int svm_save_model(const char *model_file_name, const struct svm_model *model, Logger &log); +struct svm_model *svm_load_model(const char *model_file_name, Logger &log); + +int svm_get_svm_type(const struct svm_model *model, Logger &log); +int svm_get_nr_class(const struct svm_model *model, Logger &log); +void svm_get_labels(const struct svm_model *model, int *label, Logger &log); +void svm_get_sv_indices(const struct svm_model *model, int *sv_indices, Logger &log); +int svm_get_nr_sv(const struct svm_model *model, Logger &log); +double svm_get_svr_probability(const struct svm_model *model, Logger &log); + +double svm_predict_values(const struct svm_model *model, const struct svm_node *x, double* dec_values, Logger &log); +double svm_predict(const struct svm_model *model, const struct svm_node *x, Logger &log); +double svm_predict_probability(const struct svm_model *model, const struct svm_node *x, double* prob_estimates, Logger &log); + +void svm_free_model_content(struct svm_model *model_ptr, Logger &log); +void svm_free_and_destroy_model(struct svm_model **model_ptr_ptr, Logger &log); +void svm_destroy_param(struct svm_parameter *param, Logger &log); + +const char *svm_check_parameter(const struct svm_problem *prob, const struct svm_parameter *param, Logger &log); +int svm_check_probability_model(const struct svm_model *model, Logger &log); + +#ifdef __cplusplus +} +#endif + +#endif /* _LIBSVM_H */ diff --git a/inst/include/svm/svm_basic.h b/inst/include/svm/svm_basic.h new file mode 100644 index 00000000..e13bfcb5 --- /dev/null +++ b/inst/include/svm/svm_basic.h @@ -0,0 +1,177 @@ +#ifndef SVM_BASIC_H +#define SVM_BASIC_H + +#include +#include + +#ifdef RCPP_INTERFACE +#include +#endif + +#include "utils/logger.h" + +enum KernelType { + _LINEAR, _POLY, _RBF, _SIGMOID // _PRECOMPUTED +}; + + +enum SVMType { + LIBSVM, SVMLIGHT +}; + +enum Preprocess { + TWOE, NONE +}; +// NORM is solely for test purposes + +// Our "input" class containing SVM paramaters and data to be classified +class SVMConfiguration { + +public: + int seed; + + std::string filename; //filename with data + std::string model_filename; + std::string 
output_filename; + bool prediction; + + std::string error_msg; //if something went wrong, there is msg with error + + SVMType library; + int svm_type; + KernelType kernel_type; + Preprocess preprocess; + + int degree; // for poly + double gamma; // for poly/rbf/sigmoid + double coef0; // for poly/sigmoid + + //these are for training only + double cache_size; // in MB + double eps; // stopping criteria + double C; // for C_SVC, EPSILON_SVR and NU_SVR + double* libsvm_class_weights; // for C_SVC + //which weight is for which class + int* libsvm_class_weights_labels; // for C_SVC + int class_weight_length; // for C_SVC + int shrinking; // use the shrinking heuristics + int probability; // do probability estimates + int max_iter; // when to stop optimization + + int nr_class; /* number of classes, = 2 in regression/one class svm */ + + //libsvm model parameters + //TODO: delete those variables + int *label; /* label of each class (label[k]) */ + int *nSV; /* number of SVs for each class (nSV[k]) */ + /* nSV[0] + nSV[1] + ... + nSV[k-1] = l */ + + /*TODO: neccessery? check what are they doing */ + double nu; /* for NU_SVC, ONE_CLASS, and NU_SVR */ + double p; /* for EPSILON_SVR */ + int l; //TODO: remove it in svm_ligth + + + /* SVMLight parameters */ + char *kernel_parm_custom; // Custom kernel parameter(s) + // Transductive learning is supported only by svmlight + bool use_transductive_learning; + double transductive_posratio; + + /* Global "stuff" */ + arma::sp_vec alpha_y; // SVMLight's alpha*y values for SV's + arma::sp_mat support_vectors; ///< Vectors are transposed (column vectors) + arma::mat data; // armadillo matrix and vector (double) + arma::vec target; + arma::vec result, decision_function; + std::string svm_options; ///< SVMLight command line parameters + + // Sparse data + bool sparse; + arma::sp_mat sparse_data; ///< Data is transposed (one example for one column) + + // Class weights + arma::vec class_weights; + bool use_class_weights; + + // Example weights (used by svmlight) + bool use_example_weights; + arma::vec example_weights; + + Logger log; + + arma::sp_mat w; //d + double b; + double pos_target; + double neg_target; + + //2eParameters & Variables + double cov_eps_smoothing_start; + double cov_eps_smoothing_end; + arma::mat inv_of_sqrt_of_cov; + arma::mat tmp_data; + arma::mat tmp_target; + long iter; //number of iterations in svm core + + // debug + bool debug_library_predict; + + + // constructors + SVMConfiguration(); + SVMConfiguration(bool); + + // methods + arma::mat getData(); + void setData(arma::mat); + void setDefaultParams(); + + void setFilename(std::string); + std::string getFilename(); + + void setModelFilename(std::string); + std::string getModelFilename(); + + void setOutputFilename(std::string); + std::string getOutputFilename(); + + void setPrediction(bool); + bool isPrediction(); + + + void setLibrary( std::string ); + void setKernel( std::string ); + void setPreprocess( std::string ); + double getB(); + void setB(double b); + // logger + void set_verbosity( int ); + + void setSeed(int); + + void setSparse(bool sparse); + + /** + * Sets sparse data from CSC sparse matrix format + */ + void setSparseData( + arma::uvec rowind, + arma::uvec colptr, + arma::vec values, + size_t n_rows, + size_t n_cols, + bool one_indexed=false + ); + + //@param class_weights_labels - needed for libsvm + void setClassWeights(arma::vec); + + arma::sp_mat &getSparseData(); + + bool isSparse(); + int getDataDim(); + int getDataExamplesNumber(); + size_t getSVCount(); +}; + 
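+// --- Editorial illustration (sketch, not part of the library API) -------
+// Minimal example of filling an SVMConfiguration before handing it to an
+// SVMHandler; the function name and the data matrices X/y are assumed.
+inline void example_configure_svm(SVMConfiguration &config,
+		const arma::mat &X, const arma::vec &y) {
+	config.setDefaultParams();
+	config.setLibrary("libsvm");   // or "svmlight"
+	config.setKernel("linear");
+	config.setPrediction(false);   // train first; flip to true to predict
+	config.setData(X);             // examples, one per row
+	config.target = y;             // class labels
+}
+// -------------------------------------------------------------------------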
diff --git a/inst/include/svm/svm_client.h b/inst/include/svm/svm_client.h
new file mode 100644
index 00000000..06fcc303
--- /dev/null
+++ b/inst/include/svm/svm_client.h
@@ -0,0 +1,98 @@
+#ifndef SVM_CLIENT_H
+#define SVM_CLIENT_H
+
+#include "svm_handler.h"
+#include "svm_basic.h"
+#include <vector>
+
+class SVMClient {
+private:
+    std::vector<SVMHandler*> SVMHandlers;
+    SVMConfiguration &config;
+    void createFlow();
+
+    double kernel(size_t data_i, size_t sv_j);
+
+public:
+    // constructors
+    SVMClient(SVMConfiguration*);
+
+    // params setters
+    void setLibrary(std::string);
+    void setKernel(std::string);
+    void setPreprocess(std::string);
+
+    void setCacheSize(double);
+    void setDegree(int);
+    void setGamma(double);
+    void setCoef0(double);
+    void setC(double);
+    void setEps(double);
+    void setShrinking(int);
+    void setProbability(int);
+    void setBias(double);
+    void setW(arma::vec);
+    void setAlpha(arma::vec);
+    void setNumberClass(int);
+
+    // additional setters
+    void setConfiguration(SVMConfiguration *);
+
+    // data getters
+    arma::mat getX();
+    arma::sp_mat getSparseX();
+    arma::vec getY();
+    arma::vec getPrediction();
+    arma::vec getDecisionFunction();
+
+    // params getters
+    std::string getLibrary();
+    std::string getKernel();
+    std::string getPreprocess();
+
+    double getCacheSize();
+    int getDegree();
+    double getGamma();
+    double getCoef0();
+    double getC();
+    double getEps();
+    bool isShrinking();
+    bool isProbability();
+    bool isSparse();
+    bool areExamplesWeighted();
+    arma::vec getExampleWeights();
+    arma::vec getClassWeights();
+    int getIterations();
+
+    // model getters
+    // double** getSV(); // double**, std::vector, arma::mat ?
+    int getNumberSV();
+    int getNumberClass();
+    arma::vec getAlpha();
+    double getBias();
+    arma::vec getW();
+    arma::sp_mat getSV();
+
+    // additional getters
+    SVMConfiguration &getConfiguration();
+
+    // runners
+    void run();
+    // Prediction independent of SVMHandlers
+    void predict(arma::mat);
+    void predictFromConfig();
+    void sparse_predict(
+        arma::uvec rowind,
+        arma::uvec colptr,
+        arma::vec values,
+        size_t n_rows,
+        size_t n_cols
+    );
+
+    /// Process a prediction request with the SVMHandler implementations
+    void requestPredict();
+    void train();
+
+};
+
+#endif
diff --git a/inst/include/svm/svm_handler.h b/inst/include/svm/svm_handler.h
new file mode 100644
index 00000000..017a5f99
--- /dev/null
+++ b/inst/include/svm/svm_handler.h
@@ -0,0 +1,14 @@
+#ifndef SVM_HANDLER_H
+#define SVM_HANDLER_H
+
+#include "svm_basic.h"
+
+// SVMHandler interface; all processing blocks implement this class
+class SVMHandler {
+public:
+    // virtual ~SVMHandler();
+    virtual void processRequest(SVMConfiguration&) = 0;
+    virtual bool canHandle(SVMConfiguration&) = 0;
+};
+
+#endif
diff --git a/inst/include/svm/svm_utils.h b/inst/include/svm/svm_utils.h
new file mode 100644
index 00000000..0d5f2a99
--- /dev/null
+++ b/inst/include/svm/svm_utils.h
@@ -0,0 +1,120 @@
+#ifndef SVC_UTILS_H_
+#define SVC_UTILS_H_
+#include <armadillo>
+#include <sstream>
+#include "svm.h"
+
+#ifndef DEBUG_GMUM
+#define ASSERT(x)
+#else
+#define ASSERT(x) \
+    if (! (x)) \
+    { \
+        cout << "ERROR!! Assert " << #x << " failed\n"; \
+        cout << " on line " << __LINE__ << "\n"; \
+        cout << " in file " << __FILE__ << "\n"; \
+    }
+#endif
+
+
+using namespace std;
+
+// TODO: clean up after consolidating svm_utils.h
+// and log.h with utils/logger.h and utils/utils.h
+template <typename T>
+std::string svm_to_str(const T& x) {
+    stringstream ss;
+    ss << x;
+    return ss.str();
+}
+
+class SvmUtils {
+private:
+    SvmUtils();
+    virtual ~SvmUtils();
+public:
+
+    static double sqrtInvMat(arma::mat &matrix, arma::mat &finalMat,
+                             double cov_eps_smoothing_start = 0) {
+        arma::vec eigenValue;
+        arma::mat eigenVector;
+        arma::mat diagonalMat;
+        arma::mat inverse;
+        double mu = arma::trace(matrix) / matrix.n_rows;
+        // avoid trying to invert a zero matrix
+        mu = mu == 0 ? cov_eps_smoothing_start : mu;
+        double cov_eps_smoothing_end = cov_eps_smoothing_start;
+        bool not_singular = false;
+        while (!not_singular) {
+            not_singular = inv_sympd(inverse, matrix);
+            matrix = (1 - cov_eps_smoothing_end) * matrix
+                + mu * cov_eps_smoothing_end * arma::eye(matrix.n_cols, matrix.n_cols);
+            cov_eps_smoothing_end *= 2;
+        }
+        arma::eig_sym(eigenValue, eigenVector, inverse);
+        finalMat = eigenVector * arma::sqrt(arma::diagmat(eigenValue)) * eigenVector.t();
+        return cov_eps_smoothing_end;
+    }
+
+    // convert libsvm sparse nodes to a dense armadillo matrix
+    static arma::mat libtoarma(svm_node** svm_nodes, int nr_sv, int dim) {
+        arma::mat ret(nr_sv, dim, arma::fill::zeros);
+
+        for (int row = 0; row < nr_sv; row++) {
+            svm_node* tmp_row = svm_nodes[row];
+            for (int j = 0; tmp_row[j].index != -1; j++) {
+                ret(row, tmp_row[j].index - 1) = tmp_row[j].value;
+            }
+        }
+        return ret;
+    }
+
+    // TODO: resize ret matrix
+    // FIXME: deprecated?
+    static void libToArma(svm_node** svm_nodes, int nr_sv, int dim, arma::mat& ret) {
+        // TODO: resize ret matrix
+        // arma::mat ret(nr_sv, dim);
+
+        for (int row = 0; row < nr_sv; row++) {
+            svm_node* tmp_row = svm_nodes[row];
+            for (int j = 0; tmp_row[j].index != -1; j++) {
+                ret(row, tmp_row[j].index - 1) = tmp_row[j].value;
+            }
+        }
+    }
+
+    // Efficiently constructs a CSR matrix containing the SVs
+    static arma::sp_mat SvmNodeToArmaSpMat(
+        svm_node** svm_nodes, int nr_sv, int dim
+    );
+
+    static arma::vec arrtoarmavec(double* arr, int size) {
+        arma::vec ret(size);
+        for (int i = 0; i < size; i++) {
+            ret(i) = arr[i];
+        }
+        return ret;
+    }
+
+    // TODO: make this work for multiclass
+    static arma::vec arrtoarmavec(double** arr, int size) {
+        return arrtoarmavec(arr[0], size);
+    }
+
+    static arma::mat matrixByValue(arma::mat &data, arma::vec &targets,
+                                   double value) {
+        return data.rows(find(targets == value));
+    }
+
+    // static void ArmaVec2
+
+};
+
+template <typename T>
+static std::string to_string(T const& value) {
+    std::stringstream sstr;
+    sstr << value;
+    return sstr.str();
+}
+
+#endif /* SVC_UTILS_H_ */
diff --git a/inst/include/svm/svmlight_runner.h b/inst/include/svm/svmlight_runner.h
new file mode 100644
index 00000000..17a1500c
--- /dev/null
+++ b/inst/include/svm/svmlight_runner.h
@@ -0,0 +1,158 @@
+/**
+ * @file svmlight_runner.h
+ * @brief SVMLight-based implementation of the SVMHandler interface
+ * @author Konrad Talik
+ * @copyright GPLv3
+ */
+
+#ifndef SVMLIGHT_RUNNER_H
+#define SVMLIGHT_RUNNER_H
+
+#include <string>
+
+#include "svm_handler.h"
+
+
+extern "C" {
+#include "svmlight/svm_common.h"
+#include "svmlight/svm_learn.h"
+}
+
+/// SVMLight library implementation of SVMHandler
+class SVMLightRunner: public SVMHandler {
+
+public:
+
+    /// Default constructor
+    SVMLightRunner();
+
+    /// Default destructor
+    ~SVMLightRunner();
+
+    // Documented in the parent class
+    bool canHandle(SVMConfiguration &);
+
+    // Documented in the parent class
+    void processRequest(
+        SVMConfiguration &
+    );
+
+protected:
+
+    /**
+     * Store numeric results as user-defined labels
+     */
+    void resultsToLabels(SVMConfiguration &);
+
+    /**
+     * Reset globals to the state assumed right after a command-line
+     * svmlight startup
+     */
+    void _clear();
+
+    /** @name Library functionality wrappers
+     * The following methods and fields wrap the library's functionality
+     * directly, with file operations parametrized (`bool use_gmumr`).
+     */
+    /// @{
+
+    /* These strings are not used by us (we operate only in memory),
+     * but keep them until the unnecessary code is removed */
+    char docfile[200];        /* file with training examples */
+    char modelfile[200];      /* file for resulting classifier */
+    char restartfile[200];    /* file with initial alphas */
+    char predictionsfile[200];
+
+    /**
+     * SVMLight's `svm_learn` main method
+     * @author Thorsten Joachims
+     */
+    int librarySVMLearnMain(int argc, char **argv, bool use_gmumr,
+        SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's `svm_learn` auxiliary method
+     * @author Thorsten Joachims
+     */
+    void librarySVMLearnReadInputParameters(
+        int argc, char *argv[], char *docfile, char *modelfile,
+        char *restartfile, long *verbosity, LEARN_PARM *learn_parm,
+        KERNEL_PARM *kernel_parm, bool use_gmumr, SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's `svm_classify` main method
+     * @author Thorsten Joachims
+     */
+    int librarySVMClassifyMain(int argc, char **argv, bool use_gmumr,
+        SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's `svm_classify` auxiliary method
+     * @author Thorsten Joachims
+     */
+    void librarySVMClassifyReadInputParameters(
+        int argc, char **argv, char *docfile, char *modelfile,
+        char *predictionsfile, long int *verbosity, long int *pred_format,
+        bool use_gmumr, SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's auxiliary method
+     * @author Thorsten Joachims
+     */
+    MODEL * libraryReadModel(
+        char *modelfile, bool use_gmumr, SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's auxiliary method
+     * @author Thorsten Joachims
+     */
+    void libraryReadDocuments(
+        char *docfile, DOC ***docs, double **label, long int *totwords,
+        long int *totdoc, bool use_gmumr, SVMConfiguration &config
+    );
+
+    /**
+     * SVMLight's auxiliary method
+     * @author Thorsten Joachims
+     */
+    void libraryWaitAnyKey();
+
+    /**
+     * SVMLight's auxiliary method
+     * @author Thorsten Joachims
+     */
+    void libraryPrintHelp();
+
+    /// @}
+
+    /**
+     * Convert SVMConfiguration to one line of SVMLight's `svm_learn` input
+     */
+    std::string SVMConfigurationToSVMLightLearnInputLine(
+        SVMConfiguration &config, long int line_num
+    );
+
+    /**
+     * Convert one Support Vector from SVMConfiguration into one line of
+     * SVMLight's model file (starting with alpha*y)
+     */
+    char * SVMConfigurationToSVMLightModelSVLine(
+        SVMConfiguration &config, long int line_num
+    );
+
+    /**
+     * Store SVMLight model data into SVMConfiguration
+     * NOTE: It does not clean unnecessary data!
+ */ + void SVMLightModelToSVMConfiguration( + MODEL *model, SVMConfiguration &config + ); +}; + +/* SVMLIGHT_RUNNER_H */ +#endif + diff --git a/inst/include/svm/two_e_svm_post.h b/inst/include/svm/two_e_svm_post.h new file mode 100644 index 00000000..69f38c42 --- /dev/null +++ b/inst/include/svm/two_e_svm_post.h @@ -0,0 +1,16 @@ + +#ifndef __TWOESVM_H_POST__ +#define __TWOESVM_H_POST__ + +#include "svm_handler.h" +#include "svm_basic.h" +#include + +class TwoeSVMPostprocessor: public SVMHandler { +public: + arma::rowvec projectingData(arma::mat &matrix, arma::rowvec &weights); + void processRequest(SVMConfiguration&); + bool canHandle(SVMConfiguration&); +}; + +#endif diff --git a/inst/include/svm/two_e_svm_pre.h b/inst/include/svm/two_e_svm_pre.h new file mode 100644 index 00000000..fab83450 --- /dev/null +++ b/inst/include/svm/two_e_svm_pre.h @@ -0,0 +1,23 @@ +#ifndef __TWOESVM_H_PRE__ +#define __TWOESVM_H_PRE__ + +#include "svm_handler.h" +#include "svm_basic.h" +#include + + +//2eSVM Preprocessor +class TwoeSVMPreprocessor: public SVMHandler { +private: + arma::mat computeTransMat(arma::mat &covPosMat, arma::mat &covNegMat); + void sqrtInvMat(const arma::mat &matrix, arma::mat &finalMat); + + +public: + void processRequest(SVMConfiguration&); + bool canHandle(SVMConfiguration&); + void makePreprocessor(); + arma::mat cov0InvSqrt; +}; + +#endif diff --git a/inst/include/svmlight/.gitignore b/inst/include/svmlight/.gitignore new file mode 100644 index 00000000..b52c6694 --- /dev/null +++ b/inst/include/svmlight/.gitignore @@ -0,0 +1,4 @@ +# Binaries +svm_classify +svm_learn + diff --git a/inst/include/svmlight/LICENSE.txt b/inst/include/svmlight/LICENSE.txt new file mode 100644 index 00000000..28d6db09 --- /dev/null +++ b/inst/include/svmlight/LICENSE.txt @@ -0,0 +1,59 @@ +SVM-Light +--------- + +Available at http://svmlight.joachims.org/ + +Author: Thorsten Joachims + thorsten@joachims.org + + Cornell University + Department of Computer Science + 4153 Upson Hall + Ithaca, NY 14853 + USA + +LICENSING TERMS + +This program is granted free of charge for research and education +purposes. However you must obtain a license from the author to use it +for commercial purposes. + +Scientific results produced using the software provided shall +acknowledge the use of SVM-Light. Please cite as + + T. Joachims, Making large-Scale SVM Learning + Practical. Advances in Kernel Methods - Support Vector + Learning, B. Schölkopf and C. Burges and A. Smola (ed.), + MIT-Press, 1999. + http://www-ai.cs.uni-dortmund.de/DOKUMENTE/joachims_99a.pdf + +Moreover shall the author of SVM-Light be informed about the +publication. + +The software must not be modified and distributed without prior +permission of the author. + +By using SVM-Light you agree to the licensing terms. + + +NO WARRANTY + +BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT +WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER +PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, +EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF +THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO +LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY +OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED +OF THE POSSIBILITY OF SUCH DAMAGES. diff --git a/inst/include/svmlight/Makefile b/inst/include/svmlight/Makefile new file mode 100644 index 00000000..d3367391 --- /dev/null +++ b/inst/include/svmlight/Makefile @@ -0,0 +1,111 @@ +# +# makefile for svm_light +# +# Thorsten Joachims, 2002 +# + +#Use the following to compile under unix or cygwin +CC = gcc +LD = gcc + +#Uncomment the following line to make CYGWIN produce stand-alone Windows executables +#SFLAGS= -mno-cygwin + +# @option -fPIC: +# R devtools compilation issue: +# /usr/bin/ld: ../inst/include/svm/svmlight/svm_learn.o: relocation R_X86_64_32 against `.rodata.str1.1' can not be used when making a shared object; recompile with -fPIC +SFLAGS= -fPIC + +CFLAGS= $(SFLAGS) -O3 # release C-Compiler flags +LFLAGS= $(SFLAGS) -O3 # release linker flags +#CFLAGS= $(SFLAGS) -pg -Wall -pedantic # debugging C-Compiler flags +#LFLAGS= $(SFLAGS) -pg # debugging linker flags +LIBS=-L. -lm # used libraries + +all: svm_learn_hideo svm_classify + +tidy: + rm -f *.o + rm -f pr_loqo/*.o + +clean: tidy + rm -f svm_learn + rm -f svm_classify + rm -f libsvmlight.so + +help: info + +info: + @echo + @echo "make for SVM-light Thorsten Joachims, 1998" + @echo + @echo "Thanks to Ralf Herbrich for the initial version." + @echo + @echo "USAGE: make [svm_learn | svm_learn_loqo | svm_learn_hideo | " + @echo " libsvmlight_hideo | libsvmlight_loqo | " + @echo " svm_classify | all | clean | tidy]" + @echo + @echo " svm_learn builds the learning module (prefers HIDEO)" + @echo " svm_learn_hideo builds the learning module using HIDEO optimizer" + @echo " svm_learn_loqo builds the learning module using PR_LOQO optimizer" + @echo " svm_classify builds the classfication module" + @echo " libsvmlight_hideo builds shared object library that can be linked into" + @echo " other code using HIDEO" + @echo " libsvmlight_loqo builds shared object library that can be linked into" + @echo " other code using PR_LOQO" + @echo " all (default) builds svm_learn + svm_classify" + @echo " clean removes .o and target files" + @echo " tidy removes .o files" + @echo + +# Create executables svm_learn and svm_classify + +svm_learn_hideo: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o + $(LD) $(LFLAGS) svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o -o svm_learn $(LIBS) + +#svm_learn_loqo: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o loqo +# $(LD) $(LFLAGS) svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o pr_loqo/pr_loqo.o -o svm_learn $(LIBS) + +svm_classify: svm_classify.o svm_common.o + $(LD) $(LFLAGS) svm_classify.o svm_common.o -o svm_classify $(LIBS) + + +# Create library libsvmlight.so, so that external code can get access to the +# learning and classification functions of svm-light by linking this library. 
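For the consuming side of this library target: once `libsvmlight.so` is built, C++ code reaches the C API the same way `svmlight_runner.h` does above, by wrapping the headers in `extern "C"`. A sketch under that assumption (the model path is a placeholder, and the program must be linked against the built library):

```cpp
// Minimal consumer of libsvmlight.so; signatures per svm_common.h below.
extern "C" {
#include "svmlight/svm_common.h"
}

double classify_one(DOC *doc) {
    MODEL *model = read_model((char *)"svm_model");  // placeholder path
    double dist = classify_example(model, doc);      // signed distance to hyperplane
    free_model(model, 1);  // deep free: also releases the support vectors
    return dist;
}
```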
+ +svm_learn_hideo_noexe: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o + +libsvmlight_hideo: svm_learn_main.o svm_learn.o svm_common.o svm_hideo.o + $(LD) -shared svm_learn.o svm_common.o svm_hideo.o -o libsvmlight.so + +#svm_learn_loqo_noexe: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o loqo + +#libsvmlight_loqo: svm_learn_main.o svm_learn.o svm_common.o svm_loqo.o +# $(LD) -shared svm_learn.o svm_common.o svm_loqo.o pr_loqo/pr_loqo.o -o libsvmlight.so + +# Compile components + +svm_hideo.o: svm_hideo.c + $(CC) -c $(CFLAGS) svm_hideo.c -o svm_hideo.o -I ../inst/include/svmlight + +#svm_loqo.o: svm_loqo.c +# $(CC) -c $(CFLAGS) svm_loqo.c -o svm_loqo.o + +svm_common.o: svm_common.c svm_common.h kernel.h + $(CC) -I ../inst/include/svmlight -c $(CFLAGS) svm_common.c -o svm_common.o + +svm_learn.o: svm_learn.c svm_common.h + $(CC) -c $(CFLAGS) svm_learn.c -o svm_learn.o -I ../inst/include/svmlight + +svm_learn_main.o: svm_learn_main.c svm_learn.h svm_common.h + $(CC) -c $(CFLAGS) svm_learn_main.c -o svm_learn_main.o -I ../inst/include/svmlight + +svm_classify.o: svm_classify.c svm_common.h kernel.h + $(CC) -c $(CFLAGS) svm_classify.c -o svm_classify.o -I ../inst/include/svmlight + + +#loqo: pr_loqo/pr_loqo.o + +#pr_loqo/pr_loqo.o: pr_loqo/pr_loqo.c +# $(CC) -c $(CFLAGS) pr_loqo/pr_loqo.c -o pr_loqo/pr_loqo.o + diff --git a/inst/include/svmlight/kernel.h b/inst/include/svmlight/kernel.h new file mode 100644 index 00000000..0133b006 --- /dev/null +++ b/inst/include/svmlight/kernel.h @@ -0,0 +1,40 @@ +/************************************************************************/ +/* */ +/* kernel.h */ +/* */ +/* User defined kernel function. Feel free to plug in your own. */ +/* */ +/* Copyright: Thorsten Joachims */ +/* Date: 16.12.97 */ +/* */ +/************************************************************************/ + +/* KERNEL_PARM is defined in svm_common.h The field 'custom' is reserved for */ +/* parameters of the user defined kernel. You can also access and use */ +/* the parameters of the other kernels. Just replace the line + return((double)(1.0)); + with your own kernel. */ + + /* Example: The following computes the polynomial kernel. sprod_ss + computes the inner product between two sparse vectors. + + return((CFLOAT)pow(kernel_parm->coef_lin*sprod_ss(a->words,b->words) + +kernel_parm->coef_const,(double)kernel_parm->poly_degree)); + */ + +/* If you are implementing a kernel that is not based on a + feature/value representation, you might want to make use of the + field "userdefined" in SVECTOR. By default, this field will contain + whatever string you put behind a # sign in the example file. So, if + a line in your training file looks like + + -1 1:3 5:6 #abcdefg + + then the SVECTOR field "words" will contain the vector 1:3 5:6, and + "userdefined" will contain the string "abcdefg". */ + +double custom_kernel(KERNEL_PARM *kernel_parm, SVECTOR *a, SVECTOR *b) + /* plug in you favorite kernel */ +{ + return((double)(1.0)); +} diff --git a/inst/include/svmlight/svm_classify.c b/inst/include/svmlight/svm_classify.c new file mode 100644 index 00000000..0b0333b0 --- /dev/null +++ b/inst/include/svmlight/svm_classify.c @@ -0,0 +1,197 @@ +/***********************************************************************/ +/* */ +/* svm_classify.c */ +/* */ +/* Classification module of Support Vector Machine. 
*/ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 02.07.02 */ +/* */ +/* Copyright (c) 2002 Thorsten Joachims - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. */ +/* */ +/************************************************************************/ + +# include "svm_common.h" + +char docfile[200]; +char modelfile[200]; +char predictionsfile[200]; + +void read_input_parameters(int, char **, char *, char *, char *, long *, + long *); +void print_help(void); + + +int main (int argc, char* argv[]) +{ + DOC *doc; /* test example */ + WORD *words; + long max_docs,max_words_doc,lld; + long totdoc=0,queryid,slackid; + long correct=0,incorrect=0,no_accuracy=0; + long res_a=0,res_b=0,res_c=0,res_d=0,wnum,pred_format; + long j; + double t1,runtime=0; + double dist,doc_label,costfactor; + char *line,*comment; + FILE *predfl,*docfl; + MODEL *model; + + read_input_parameters(argc,argv,docfile,modelfile,predictionsfile, + &verbosity,&pred_format); + + nol_ll(docfile,&max_docs,&max_words_doc,&lld); /* scan size of input file */ + max_words_doc+=2; + lld+=2; + + line = (char *)my_malloc(sizeof(char)*lld); + words = (WORD *)my_malloc(sizeof(WORD)*(max_words_doc+10)); + + model=read_model(modelfile); + + if(model->kernel_parm.kernel_type == 0) { /* linear kernel */ + /* compute weight vector */ + add_weight_vector_to_linear_model(model); + } + + if(verbosity>=2) { + printf("Classifying test examples.."); fflush(stdout); + } + + if ((docfl = fopen (docfile, "r")) == NULL) + { perror (docfile); exit (1); } + if ((predfl = fopen (predictionsfile, "w")) == NULL) + { perror (predictionsfile); exit (1); } + + while((!feof(docfl)) && fgets(line,(int)lld,docfl)) { + if(line[0] == '#') continue; /* line contains comments */ + parse_document(line,words,&doc_label,&queryid,&slackid,&costfactor,&wnum, + max_words_doc,&comment); + totdoc++; + if(model->kernel_parm.kernel_type == 0) { /* linear kernel */ + for(j=0;(words[j]).wnum != 0;j++) { /* Check if feature numbers */ + if((words[j]).wnum>model->totwords) /* are not larger than in */ + (words[j]).wnum=0; /* model. Remove feature if */ + } /* necessary. 
*/ + doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0)); + t1=get_runtime(); + dist=classify_example_linear(model,doc); + runtime+=(get_runtime()-t1); + free_example(doc,1); + } + else { /* non-linear kernel */ + doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0)); + t1=get_runtime(); + dist=classify_example(model,doc); + runtime+=(get_runtime()-t1); + free_example(doc,1); + } + if(dist>0) { + if(pred_format==0) { /* old weired output format */ + fprintf(predfl,"%.8g:+1 %.8g:-1\n",dist,-dist); + } + if(doc_label>0) correct++; else incorrect++; + if(doc_label>0) res_a++; else res_b++; + } + else { + if(pred_format==0) { /* old weired output format */ + fprintf(predfl,"%.8g:-1 %.8g:+1\n",-dist,dist); + } + if(doc_label<0) correct++; else incorrect++; + if(doc_label>0) res_c++; else res_d++; + } + if(pred_format==1) { /* output the value of decision function */ + fprintf(predfl,"%.8g\n",dist); + } + if((int)(0.01+(doc_label*doc_label)) != 1) + { no_accuracy=1; } /* test data is not binary labeled */ + if(verbosity>=2) { + if(totdoc % 100 == 0) { + printf("%ld..",totdoc); fflush(stdout); + } + } + } + fclose(predfl); + fclose(docfl); + free(line); + free(words); + free_model(model,1); + + if(verbosity>=2) { + printf("done\n"); + +/* Note by Gary Boone Date: 29 April 2000 */ +/* o Timing is inaccurate. The timer has 0.01 second resolution. */ +/* Because classification of a single vector takes less than */ +/* 0.01 secs, the timer was underflowing. */ + printf("Runtime (without IO) in cpu-seconds: %.2f\n", + (float)(runtime/100.0)); + + } + if((!no_accuracy) && (verbosity>=1)) { + printf("Accuracy on test set: %.2f%% (%ld correct, %ld incorrect, %ld total)\n",(float)(correct)*100.0/totdoc,correct,incorrect,totdoc); + printf("Precision/recall on test set: %.2f%%/%.2f%%\n",(float)(res_a)*100.0/(res_a+res_b),(float)(res_a)*100.0/(res_a+res_c)); + } + + return(0); +} + +void read_input_parameters(int argc, char **argv, char *docfile, + char *modelfile, char *predictionsfile, + long int *verbosity, long int *pred_format) +{ + long i; + + /* set default */ + strcpy (modelfile, "svm_model"); + strcpy (predictionsfile, "svm_predictions"); + (*verbosity)=2; + (*pred_format)=1; + + for(i=1;(i=argc) { + printf("\nNot enough input parameters!\n\n"); + print_help(); + exit(0); + } + strcpy (docfile, argv[i]); + strcpy (modelfile, argv[i+1]); + if((i+2) this help\n"); + printf(" -v [0..3] -> verbosity level (default 2)\n"); + printf(" -f [0,1] -> 0: old output format of V1.0\n"); + printf(" -> 1: output the value of decision function (default)\n\n"); +} + + + + diff --git a/inst/include/svmlight/svm_common.h b/inst/include/svmlight/svm_common.h new file mode 100644 index 00000000..2c6834ac --- /dev/null +++ b/inst/include/svmlight/svm_common.h @@ -0,0 +1,303 @@ +/************************************************************************/ +/* */ +/* svm_common.h */ +/* */ +/* Definitions and functions used in both svm_learn and svm_classify. */ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 02.07.02 */ +/* */ +/* Copyright (c) 2002 Thorsten Joachims - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. 
*/ +/* */ +/************************************************************************/ + +#ifndef SVM_COMMON +#define SVM_COMMON + +# include +# include +# include +# include +# include +# include +# include +# include + +# define VERSION "V6.02" +# define VERSION_DATE "14.08.08" + +# define CFLOAT float /* the type of float to use for caching */ + /* kernel evaluations. Using float saves */ + /* us some memory, but you can use double, too */ +# define FNUM long /* the type used for storing feature ids */ +# define FVAL float /* the type used for storing feature values */ +# define MAXFEATNUM 99999999 /* maximum feature number (must be in + valid range of FNUM type and long int!) */ + +# define LINEAR 0 /* linear kernel type */ +# define POLY 1 /* polynoial kernel type */ +# define RBF 2 /* rbf kernel type */ +# define SIGMOID 3 /* sigmoid kernel type */ + +# define CLASSIFICATION 1 /* train classification model */ +# define REGRESSION 2 /* train regression model */ +# define RANKING 3 /* train ranking model */ +# define OPTIMIZATION 4 /* train on general set of constraints */ + +# define MAXSHRINK 50000 /* maximum number of shrinking rounds */ + +typedef struct word { + FNUM wnum; /* word number */ + FVAL weight; /* word weight */ +} WORD; + +typedef struct svector { + WORD *words; /* The features/values in the vector by + increasing feature-number. Feature + numbers that are skipped are + interpreted as having value zero. */ + double twonorm_sq; /* The squared euclidian length of the + vector. Used to speed up the RBF kernel. */ + char *userdefined; /* You can put additional information + here. This can be useful, if you are + implementing your own kernel that + does not work with feature/values + representations (for example a + string kernel). By default, + svm-light will put here the string + after the # sign from each line of + the input file. */ + long kernel_id; /* Feature vectors with different + kernel_id's are orthogonal (ie. the + feature number do not match). This + is used for computing component + kernels for linear constraints which + are a sum of several different + weight vectors. (currently not + implemented). */ + struct svector *next; /* Let's you set up a list of SVECTOR's + for linear constraints which are a + sum of multiple feature + vectors. List is terminated by + NULL. */ + double factor; /* Factor by which this feature vector + is multiplied in the sum. */ +} SVECTOR; + +typedef struct doc { + long docnum; /* Document ID. This has to be the position of + the document in the training set array. */ + long queryid; /* for learning rankings, constraints are + generated for documents with the same + queryID. */ + double costfactor; /* Scales the cost of misclassifying this + document by this factor. The effect of this + value is, that the upper bound on the alpha + for this example is scaled by this factor. + The factors are set by the feature + 'cost:' in the training data. */ + long slackid; /* Index of the slack variable + corresponding to this + constraint. All constraints with the + same slackid share the same slack + variable. This can only be used for + svm_learn_optimization. */ + SVECTOR *fvec; /* Feature vector of the example. The + feature vector can actually be a + list of feature vectors. For + example, the list will have two + elements, if this DOC is a + preference constraint. The one + vector that is supposed to be ranked + higher, will have a factor of +1, + the lower ranked one should have a + factor of -1. 
*/ +} DOC; + +typedef struct learn_parm { + long type; /* selects between regression and + classification */ + double svm_c; /* upper bound C on alphas */ + double eps; /* regression epsilon (eps=1.0 for + classification */ + double svm_costratio; /* factor to multiply C for positive examples */ + double transduction_posratio;/* fraction of unlabeled examples to be */ + /* classified as positives */ + long biased_hyperplane; /* if nonzero, use hyperplane w*x+b=0 + otherwise w*x=0 */ + long sharedslack; /* if nonzero, it will use the shared + slack variable mode in + svm_learn_optimization. It requires + that the slackid is set for every + training example */ + long svm_maxqpsize; /* size q of working set */ + long svm_newvarsinqp; /* new variables to enter the working set + in each iteration */ + long kernel_cache_size; /* size of kernel cache in megabytes */ + double epsilon_crit; /* tolerable error for distances used + in stopping criterion */ + double epsilon_shrink; /* how much a multiplier should be above + zero for shrinking */ + long svm_iter_to_shrink; /* iterations h after which an example can + be removed by shrinking */ + long maxiter; /* number of iterations after which the + optimizer terminates, if there was + no progress in maxdiff */ + long remove_inconsistent; /* exclude examples with alpha at C and + retrain */ + long skip_final_opt_check; /* do not check KT-Conditions at the end of + optimization for examples removed by + shrinking. WARNING_LEVEL: This might lead to + sub-optimal solutions! */ + long compute_loo; /* if nonzero, computes leave-one-out + estimates */ + double rho; /* parameter in xi/alpha-estimates and for + pruning leave-one-out range [1..2] */ + long xa_depth; /* parameter in xi/alpha-estimates upper + bounding the number of SV the current + alpha_t is distributed over */ + char predfile[200]; /* file for predicitions on unlabeled examples + in transduction */ + char alphafile[200]; /* file to store optimal alphas in. use + empty string if alphas should not be + output */ + + /* you probably do not want to touch the following */ + double epsilon_const; /* tolerable error on eq-constraint */ + double epsilon_a; /* tolerable error on alphas at bounds */ + double opt_precision; /* precision of solver, set to e.g. 
1e-21 + if you get convergence problems */ + + /* the following are only for internal use */ + long svm_c_steps; /* do so many steps for finding optimal C */ + double svm_c_factor; /* increase C by this factor every step */ + double svm_costratio_unlab; + double svm_unlabbound; + double *svm_cost; /* individual upper bounds for each var */ + long totwords; /* number of features */ + long iterations; +} LEARN_PARM; + +typedef struct kernel_parm { + long kernel_type; /* 0=linear, 1=poly, 2=rbf, 3=sigmoid, 4=custom */ + long poly_degree; + double rbf_gamma; + double coef_lin; + double coef_const; + char custom[50]; /* for user supplied kernel */ +} KERNEL_PARM; + +typedef struct model { + long sv_num; + long at_upper_bound; + double b; + DOC **supvec; + double *alpha; + long *index; /* index from docnum to position in model */ + long totwords; /* number of features */ + long totdoc; /* number of training documents */ + KERNEL_PARM kernel_parm; /* kernel */ + + /* the following values are not written to file */ + double loo_error,loo_recall,loo_precision; /* leave-one-out estimates */ + double xa_error,xa_recall,xa_precision; /* xi/alpha estimates */ + double *lin_weights; /* weights for linear case using + folding */ + double maxdiff; /* precision, up to which this + model is accurate */ +} MODEL; + +typedef struct quadratic_program { + long opt_n; /* number of variables */ + long opt_m; /* number of linear equality constraints */ + double *opt_ce,*opt_ce0; /* linear equality constraints */ + double *opt_g; /* hessian of objective */ + double *opt_g0; /* linear part of objective */ + double *opt_xinit; /* initial value for variables */ + double *opt_low,*opt_up; /* box constraints */ +} QP; + +typedef struct kernel_cache { + long *index; /* cache some kernel evalutations */ + CFLOAT *buffer; /* to improve speed */ + long *invindex; + long *active2totdoc; + long *totdoc2active; + long *lru; + long *occu; + long elems; + long max_elems; + long time; + long activenum; + long buffsize; +} KERNEL_CACHE; + + +typedef struct timing_profile { + long time_kernel; + long time_opti; + long time_shrink; + long time_update; + long time_model; + long time_check; + long time_select; +} TIMING; + +typedef struct shrink_state { + long *active; + long *inactive_since; + long deactnum; + double **a_history; /* for shrinking with non-linear kernel */ + long maxhistory; + double *last_a; /* for shrinking with linear kernel */ + double *last_lin; /* for shrinking with linear kernel */ +} SHRINK_STATE; + +double classify_example(MODEL *, DOC *); +double classify_example_linear(MODEL *, DOC *); +double kernel(KERNEL_PARM *, DOC *, DOC *); +double single_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); +double custom_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); +SVECTOR *create_svector(WORD *, char *, double); +SVECTOR *copy_svector(SVECTOR *); +void free_svector(SVECTOR *); +double sprod_ss(SVECTOR *, SVECTOR *); +SVECTOR* sub_ss(SVECTOR *, SVECTOR *); +SVECTOR* add_ss(SVECTOR *, SVECTOR *); +SVECTOR* add_list_ss(SVECTOR *); +void append_svector_list(SVECTOR *a, SVECTOR *b); +SVECTOR* smult_s(SVECTOR *, double); +int featvec_eq(SVECTOR *, SVECTOR *); +double model_length_s(MODEL *, KERNEL_PARM *); +void clear_vector_n(double *, long); +void add_vector_ns(double *, SVECTOR *, double); +double sprod_ns(double *, SVECTOR *); +void add_weight_vector_to_linear_model(MODEL *); +DOC *create_example(long, long, long, double, SVECTOR *); +void free_example(DOC *, long); +MODEL *read_model(char *); +MODEL *copy_model(MODEL *); 
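The WORD/SVECTOR/DOC plumbing declared here mirrors what svm_classify.c does for each input line: a WORD array sorted by feature number and terminated by `wnum == 0` is wrapped into an SVECTOR, then into a DOC. A sketch, not part of the header (the empty userdefined string and the docnum are arbitrary choices here):

```cpp
// Build the sparse example "1:3 5:6" in the format described above.
WORD words[3];
words[0].wnum = 1; words[0].weight = 3.0;  // feature 1, value 3
words[1].wnum = 5; words[1].weight = 6.0;  // feature 5, value 6
words[2].wnum = 0; words[2].weight = 0.0;  // terminator: wnum == 0

SVECTOR *vec = create_svector(words, (char *)"", 1.0);  // empty userdefined, factor 1.0
DOC *doc = create_example(-1, 0, 0, 0.0, vec);          // docnum -1, costfactor 0.0
/* ... e.g. classify_example(model, doc) ... */
free_example(doc, 1);  // 1: also free the contained SVECTOR
```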
+void free_model(MODEL *, int); +void read_documents(char *, DOC ***, double **, long *, long *); +int parse_document(char *, WORD *, double *, long *, long *, double *, long *, long, char **); +double *read_alphas(char *,long); +void nol_ll(char *, long *, long *, long *); +long minl(long, long); +long maxl(long, long); +long get_runtime(void); +int space_or_null(int); +void *my_malloc(size_t); +void copyright_notice(void); +# ifdef _MSC_VER + int isnan(double); +# endif + +extern long verbosity; /* verbosity level (0-4) */ +extern long kernel_cache_statistic; + +#endif diff --git a/inst/include/svmlight/svm_learn.h b/inst/include/svmlight/svm_learn.h new file mode 100644 index 00000000..b1968ee5 --- /dev/null +++ b/inst/include/svmlight/svm_learn.h @@ -0,0 +1,169 @@ +/***********************************************************************/ +/* */ +/* svm_learn.h */ +/* */ +/* Declarations for learning module of Support Vector Machine. */ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 02.07.02 */ +/* */ +/* Copyright (c) 2002 Thorsten Joachims - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. */ +/* */ +/***********************************************************************/ + +#ifndef SVM_LEARN +#define SVM_LEARN + +void svm_learn_classification(DOC **, double *, long, long, LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE *, MODEL *, + double *); +void svm_learn_regression(DOC **, double *, long, long, LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE **, MODEL *); +void svm_learn_ranking(DOC **, double *, long, long, LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE **, MODEL *); +void svm_learn_optimization(DOC **, double *, long, long, LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE *, MODEL *, + double *); +long optimize_to_convergence(DOC **, long *, long, long, LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE *, SHRINK_STATE *, + MODEL *, long *, long *, double *, + double *, double *, + TIMING *, double *, long, long); +long optimize_to_convergence_sharedslack(DOC **, long *, long, long, + LEARN_PARM *, + KERNEL_PARM *, KERNEL_CACHE *, SHRINK_STATE *, + MODEL *, double *, double *, double *, + TIMING *, double *); +double compute_objective_function(double *, double *, double *, double, + long *, long *); +void clear_index(long *); +void add_to_index(long *, long); +long compute_index(long *,long, long *); +void optimize_svm(DOC **, long *, long *, long *, double, long *, long *, + MODEL *, + long, long *, long, double *, double *, double *, + LEARN_PARM *, CFLOAT *, KERNEL_PARM *, QP *, double *); +void compute_matrices_for_optimization(DOC **, long *, long *, long *, double, + long *, + long *, long *, MODEL *, double *, + double *, double *, long, long, LEARN_PARM *, + CFLOAT *, KERNEL_PARM *, QP *); +long calculate_svm_model(DOC **, long *, long *, double *, double *, + double *, double *, LEARN_PARM *, long *, + long *, MODEL *); +long check_optimality(MODEL *, long *, long *, double *, double *, + double *, long, + LEARN_PARM *,double *, double, long *, long *, long *, + long *, long, KERNEL_PARM *); +long check_optimality_sharedslack(DOC **docs, MODEL *model, long int *label, + double *a, double *lin, double *c, double *slack, + double *alphaslack, long int totdoc, + LEARN_PARM *learn_parm, double *maxdiff, + double epsilon_crit_org, long int *misclassified, + long int 
*active2dnum, + long int *last_suboptimal_at, + long int iteration, KERNEL_PARM *kernel_parm); +void compute_shared_slacks(DOC **docs, long int *label, double *a, + double *lin, double *c, long int *active2dnum, + LEARN_PARM *learn_parm, + double *slack, double *alphaslack); +long identify_inconsistent(double *, long *, long *, long, LEARN_PARM *, + long *, long *); +long identify_misclassified(double *, long *, long *, long, + MODEL *, long *, long *); +long identify_one_misclassified(double *, long *, long *, long, + MODEL *, long *, long *); +long incorporate_unlabeled_examples(MODEL *, long *,long *, long *, + double *, double *, long, double *, + long *, long *, long, KERNEL_PARM *, + LEARN_PARM *); +void update_linear_component(DOC **, long *, long *, double *, double *, + long *, long, long, KERNEL_PARM *, + KERNEL_CACHE *, double *, + CFLOAT *, double *); +long select_next_qp_subproblem_grad(long *, long *, double *, + double *, double *, long, + long, LEARN_PARM *, long *, long *, + long *, double *, long *, KERNEL_CACHE *, + long, long *, long *); +long select_next_qp_subproblem_rand(long *, long *, double *, + double *, double *, long, + long, LEARN_PARM *, long *, long *, + long *, double *, long *, KERNEL_CACHE *, + long *, long *, long); +long select_next_qp_slackset(DOC **docs, long int *label, double *a, + double *lin, double *slack, double *alphaslack, + double *c, LEARN_PARM *learn_parm, + long int *active2dnum, double *maxviol); +void select_top_n(double *, long, long *, long); +void init_shrink_state(SHRINK_STATE *, long, long); +void shrink_state_cleanup(SHRINK_STATE *); +long shrink_problem(DOC **, LEARN_PARM *, SHRINK_STATE *, KERNEL_PARM *, + long *, long *, long, long, long, double *, long *); +void reactivate_inactive_examples(long *, long *, double *, SHRINK_STATE *, + double *, double*, long, long, long, LEARN_PARM *, + long *, DOC **, KERNEL_PARM *, + KERNEL_CACHE *, MODEL *, CFLOAT *, + double *, double *); + +/* cache kernel evalutations to improve speed */ +KERNEL_CACHE *kernel_cache_init(long, long); +void kernel_cache_cleanup(KERNEL_CACHE *); +void get_kernel_row(KERNEL_CACHE *,DOC **, long, long, long *, CFLOAT *, + KERNEL_PARM *); +void cache_kernel_row(KERNEL_CACHE *,DOC **, long, KERNEL_PARM *); +void cache_multiple_kernel_rows(KERNEL_CACHE *,DOC **, long *, long, + KERNEL_PARM *); +void kernel_cache_shrink(KERNEL_CACHE *,long, long, long *); +void kernel_cache_reset_lru(KERNEL_CACHE *); +long kernel_cache_malloc(KERNEL_CACHE *); +void kernel_cache_free(KERNEL_CACHE *,long); +long kernel_cache_free_lru(KERNEL_CACHE *); +CFLOAT *kernel_cache_clean_and_malloc(KERNEL_CACHE *,long); +long kernel_cache_touch(KERNEL_CACHE *,long); +long kernel_cache_check(KERNEL_CACHE *,long); +long kernel_cache_space_available(KERNEL_CACHE *); + +void compute_xa_estimates(MODEL *, long *, long *, long, DOC **, + double *, double *, KERNEL_PARM *, + LEARN_PARM *, double *, double *, double *); +double xa_estimate_error(MODEL *, long *, long *, long, DOC **, + double *, double *, KERNEL_PARM *, + LEARN_PARM *); +double xa_estimate_recall(MODEL *, long *, long *, long, DOC **, + double *, double *, KERNEL_PARM *, + LEARN_PARM *); +double xa_estimate_precision(MODEL *, long *, long *, long, DOC **, + double *, double *, KERNEL_PARM *, + LEARN_PARM *); +void avg_similarity_of_sv_of_one_class(MODEL *, DOC **, double *, long *, KERNEL_PARM *, double *, double *); +double most_similar_sv_of_same_class(MODEL *, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *); +double 
distribute_alpha_t_greedily(long *, long, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *, double); +double distribute_alpha_t_greedily_noindex(MODEL *, DOC **, double *, long, long *, KERNEL_PARM *, LEARN_PARM *, double); +void estimate_transduction_quality(MODEL *, long *, long *, long, DOC **, double *); +double estimate_margin_vcdim(MODEL *, double, double, KERNEL_PARM *); +double estimate_sphere(MODEL *, KERNEL_PARM *); +double estimate_r_delta_average(DOC **, long, KERNEL_PARM *); +double estimate_r_delta(DOC **, long, KERNEL_PARM *); +double length_of_longest_document_vector(DOC **, long, KERNEL_PARM *); + +void write_model(char *, MODEL *); +void write_prediction(char *, MODEL *, double *, double *, long *, long *, + long, LEARN_PARM *); +void write_alphas(char *, double *, long *, long); + +typedef struct cache_parm_s { + KERNEL_CACHE *kernel_cache; + CFLOAT *cache; + DOC **docs; + long m; + KERNEL_PARM *kernel_parm; + long offset,stepsize; +} cache_parm_t; + +#endif diff --git a/inst/include/svmlight/svm_learn_main.c b/inst/include/svmlight/svm_learn_main.c new file mode 100644 index 00000000..e2a157da --- /dev/null +++ b/inst/include/svmlight/svm_learn_main.c @@ -0,0 +1,397 @@ +/***********************************************************************/ +/* */ +/* svm_learn_main.c */ +/* */ +/* Command line interface to the learning module of the */ +/* Support Vector Machine. */ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 02.07.02 */ +/* */ +/* Copyright (c) 2000 Thorsten Joachims - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. */ +/* */ +/***********************************************************************/ + + +/* if svm-learn is used out of C++, define it as extern "C" */ +#ifdef __cplusplus +extern "C" { +#endif + +# include "svm_common.h" +# include "svm_learn.h" + +#ifdef __cplusplus +} +#endif + +char docfile[200]; /* file with training examples */ +char modelfile[200]; /* file for resulting classifier */ +char restartfile[200]; /* file with initial alphas */ + +void read_input_parameters(int, char **, char *, char *, char *, long *, + LEARN_PARM *, KERNEL_PARM *); +void wait_any_key(); +void print_help(); + + + +int main (int argc, char* argv[]) +{ + DOC **docs; /* training examples */ + long totwords,totdoc,i; + double *target; + double *alpha_in=NULL; + KERNEL_CACHE *kernel_cache; + LEARN_PARM learn_parm; + KERNEL_PARM kernel_parm; + MODEL *model=(MODEL *)my_malloc(sizeof(MODEL)); + + read_input_parameters(argc,argv,docfile,modelfile,restartfile,&verbosity, + &learn_parm,&kernel_parm); + read_documents(docfile,&docs,&target,&totwords,&totdoc); + if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc); + + if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */ + kernel_cache=NULL; + } + else { + /* Always get a new kernel cache. 
It is not possible to use the + same cache for two different training runs */ + kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size); + } + + if(learn_parm.type == CLASSIFICATION) { + svm_learn_classification(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,kernel_cache,model,alpha_in); + } + else if(learn_parm.type == REGRESSION) { + svm_learn_regression(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,&kernel_cache,model); + } + else if(learn_parm.type == RANKING) { + svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,&kernel_cache,model); + } + else if(learn_parm.type == OPTIMIZATION) { + svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,kernel_cache,model,alpha_in); + } + + if(kernel_cache) { + /* Free the memory used for the cache. */ + kernel_cache_cleanup(kernel_cache); + } + + /* Warning: The model contains references to the original data 'docs'. + If you want to free the original data, and only keep the model, you + have to make a deep copy of 'model'. */ + /* deep_copy_of_model=copy_model(model); */ + write_model(modelfile,model); + + free(alpha_in); + free_model(model,0); + for(i=0;ipredfile, "trans_predictions"); + strcpy (learn_parm->alphafile, ""); + strcpy (restartfile, ""); + (*verbosity)=1; + learn_parm->biased_hyperplane=1; + learn_parm->sharedslack=0; + learn_parm->remove_inconsistent=0; + learn_parm->skip_final_opt_check=0; + learn_parm->svm_maxqpsize=10; + learn_parm->svm_newvarsinqp=0; + learn_parm->svm_iter_to_shrink=-9999; + learn_parm->maxiter=100000; + learn_parm->kernel_cache_size=40; + learn_parm->svm_c=0.0; + learn_parm->eps=0.1; + learn_parm->transduction_posratio=-1.0; + learn_parm->svm_costratio=1.0; + learn_parm->svm_costratio_unlab=1.0; + learn_parm->svm_unlabbound=1E-5; + learn_parm->epsilon_crit=0.001; + learn_parm->epsilon_a=1E-15; + learn_parm->compute_loo=0; + learn_parm->rho=1.0; + learn_parm->xa_depth=0; + kernel_parm->kernel_type=0; + kernel_parm->poly_degree=3; + kernel_parm->rbf_gamma=1.0; + kernel_parm->coef_lin=1; + kernel_parm->coef_const=1; + strcpy(kernel_parm->custom,"empty"); + strcpy(type,"c"); + + for(i=1;(ibiased_hyperplane=atol(argv[i]); break; + case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break; + case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break; + case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break; + case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break; + case '#': i++; learn_parm->maxiter=atol(argv[i]); break; + case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break; + case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break; + case 'c': i++; learn_parm->svm_c=atof(argv[i]); break; + case 'w': i++; learn_parm->eps=atof(argv[i]); break; + case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break; + case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break; + case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break; + case 'o': i++; learn_parm->rho=atof(argv[i]); break; + case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break; + case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break; + case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break; + case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break; + case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break; + case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break; + case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break; + case 'u': i++; strcpy(kernel_parm->custom,argv[i]); 
break; + case 'l': i++; strcpy(learn_parm->predfile,argv[i]); break; + case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break; + case 'y': i++; strcpy(restartfile,argv[i]); break; + default: printf("\nUnrecognized option %s!\n\n",argv[i]); + print_help(); + exit(0); + } + } + if(i>=argc) { + printf("\nNot enough input parameters!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + strcpy (docfile, argv[i]); + if((i+1)svm_iter_to_shrink == -9999) { + if(kernel_parm->kernel_type == LINEAR) + learn_parm->svm_iter_to_shrink=2; + else + learn_parm->svm_iter_to_shrink=100; + } + if(strcmp(type,"c")==0) { + learn_parm->type=CLASSIFICATION; + } + else if(strcmp(type,"r")==0) { + learn_parm->type=REGRESSION; + } + else if(strcmp(type,"p")==0) { + learn_parm->type=RANKING; + } + else if(strcmp(type,"o")==0) { + learn_parm->type=OPTIMIZATION; + } + else if(strcmp(type,"s")==0) { + learn_parm->type=OPTIMIZATION; + learn_parm->sharedslack=1; + } + else { + printf("\nUnknown type '%s': Valid types are 'c' (classification), 'r' regession, and 'p' preference ranking.\n",type); + wait_any_key(); + print_help(); + exit(0); + } + if((learn_parm->skip_final_opt_check) + && (kernel_parm->kernel_type == LINEAR)) { + printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n"); + learn_parm->skip_final_opt_check=0; + } + if((learn_parm->skip_final_opt_check) + && (learn_parm->remove_inconsistent)) { + printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n"); + wait_any_key(); + print_help(); + exit(0); + } + if((learn_parm->svm_maxqpsize<2)) { + printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize); + wait_any_key(); + print_help(); + exit(0); + } + if((learn_parm->svm_maxqpsizesvm_newvarsinqp)) { + printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize); + printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->svm_iter_to_shrink<1) { + printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->svm_c<0) { + printf("\nThe C parameter must be greater than zero!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->transduction_posratio>1) { + printf("\nThe fraction of unlabeled examples to classify as positives must\n"); + printf("be less than 1.0 !!!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->svm_costratio<=0) { + printf("\nThe COSTRATIO parameter must be greater than zero!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->epsilon_crit<=0) { + printf("\nThe epsilon parameter must be greater than zero!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + if(learn_parm->rho<0) { + printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n"); + printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n"); + printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n"); + wait_any_key(); + print_help(); + exit(0); + } + if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) { + printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n"); + printf("for switching to the conventional xa/estimates described in T. 
Joachims,\n"); + printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n"); + wait_any_key(); + print_help(); + exit(0); + } +} + +void wait_any_key() +{ + printf("\n(more)\n"); + (void)getc(stdin); +} + +void print_help() +{ + printf("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE); + copyright_notice(); + printf(" usage: svm_learn [options] example_file model_file\n\n"); + printf("Arguments:\n"); + printf(" example_file-> file with training data\n"); + printf(" model_file -> file to store learned decision rule in\n"); + + printf("General options:\n"); + printf(" -? -> this help\n"); + printf(" -v [0..3] -> verbosity level (default 1)\n"); + printf("Learning options:\n"); + printf(" -z {c,r,p} -> select between classification (c), regression (r),\n"); + printf(" and preference ranking (p) (default classification)\n"); + printf(" -c float -> C: trade-off between training error\n"); + printf(" and margin (default [avg. x*x]^-1)\n"); + printf(" -w [0..] -> epsilon width of tube for regression\n"); + printf(" (default 0.1)\n"); + printf(" -j float -> Cost: cost-factor, by which training errors on\n"); + printf(" positive examples outweight errors on negative\n"); + printf(" examples (default 1) (see [4])\n"); + printf(" -b [0,1] -> use biased hyperplane (i.e. x*w+b>0) instead\n"); + printf(" of unbiased hyperplane (i.e. x*w>0) (default 1)\n"); + printf(" -i [0,1] -> remove inconsistent training examples\n"); + printf(" and retrain (default 0)\n"); + printf("Performance estimation options:\n"); + printf(" -x [0,1] -> compute leave-one-out estimates (default 0)\n"); + printf(" (see [5])\n"); + printf(" -o ]0..2] -> value of rho for XiAlpha-estimator and for pruning\n"); + printf(" leave-one-out computation (default 1.0) (see [2])\n"); + printf(" -k [0..100] -> search depth for extended XiAlpha-estimator \n"); + printf(" (default 0)\n"); + printf("Transduction options (see [3]):\n"); + printf(" -p [0..1] -> fraction of unlabeled examples to be classified\n"); + printf(" into the positive class (default is the ratio of\n"); + printf(" positive and negative examples in the training data)\n"); + printf("Kernel options:\n"); + printf(" -t int -> type of kernel function:\n"); + printf(" 0: linear (default)\n"); + printf(" 1: polynomial (s a*b+c)^d\n"); + printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n"); + printf(" 3: sigmoid tanh(s a*b + c)\n"); + printf(" 4: user defined kernel from kernel.h\n"); + printf(" -d int -> parameter d in polynomial kernel\n"); + printf(" -g float -> parameter gamma in rbf kernel\n"); + printf(" -s float -> parameter s in sigmoid/poly kernel\n"); + printf(" -r float -> parameter c in sigmoid/poly kernel\n"); + printf(" -u string -> parameter of user defined kernel\n"); + printf("Optimization options (see [1]):\n"); + printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n"); + printf(" -n [2..q] -> number of new variables entering the working set\n"); + printf(" in each iteration (default n = q). Set n size of cache for kernel evaluations in MB (default 40)\n"); + printf(" The larger the faster...\n"); + printf(" -e float -> eps: Allow that error for termination criterion\n"); + printf(" [y [w*x+b] - 1] >= eps (default 0.001)\n"); + printf(" -y [0,1] -> restart the optimization from alpha values in file\n"); + printf(" specified by -a option. (default 0)\n"); + printf(" -h [5..] 
-> number of iterations a variable needs to be\n"); + printf(" optimal before considered for shrinking (default 100)\n"); + printf(" -f [0,1] -> do final optimality check for variables removed\n"); + printf(" by shrinking. Although this test is usually \n"); + printf(" positive, there is no guarantee that the optimum\n"); + printf(" was found if the test is omitted. (default 1)\n"); + printf(" -y string -> if option is given, reads alphas from file with given\n"); + printf(" and uses them as starting point. (default 'disabled')\n"); + printf(" -# int -> terminate optimization, if no progress after this\n"); + printf(" number of iterations. (default 100000)\n"); + printf("Output options:\n"); + printf(" -l string -> file to write predicted labels of unlabeled\n"); + printf(" examples into after transductive learning\n"); + printf(" -a string -> write all alphas to this file after learning\n"); + printf(" (in the same order as in the training set)\n"); + wait_any_key(); + printf("\nMore details in:\n"); + printf("[1] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n"); + printf(" Kernel Methods - Support Vector Learning, B. Schölkopf and C. Burges and\n"); + printf(" A. Smola (ed.), MIT Press, 1999.\n"); + printf("[2] T. Joachims, Estimating the Generalization performance of an SVM\n"); + printf(" Efficiently. International Conference on Machine Learning (ICML), 2000.\n"); + printf("[3] T. Joachims, Transductive Inference for Text Classification using Support\n"); + printf(" Vector Machines. International Conference on Machine Learning (ICML),\n"); + printf(" 1999.\n"); + printf("[4] K. Morik, P. Brockhausen, and T. Joachims, Combining statistical learning\n"); + printf(" with a knowledge-based approach - A case study in intensive care \n"); + printf(" monitoring. International Conference on Machine Learning (ICML), 1999.\n"); + printf("[5] T. Joachims, Learning to Classify Text Using Support Vector\n"); + printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n"); + printf(" 2002.\n\n"); +} + + diff --git a/inst/include/svmlight/svm_loqo.c b/inst/include/svmlight/svm_loqo.c new file mode 100644 index 00000000..ff31a655 --- /dev/null +++ b/inst/include/svmlight/svm_loqo.c @@ -0,0 +1,211 @@ +/***********************************************************************/ +/* */ +/* svm_loqo.c */ +/* */ +/* Interface to the PR_LOQO optimization package for SVM. */ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 19.07.99 */ +/* */ +/* Copyright (c) 1999 Universitaet Dortmund - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. 
*/ +/* */ +/***********************************************************************/ + +# include +# include "pr_loqo/pr_loqo.h" +# include "svm_common.h" + +/* Common Block Declarations */ + +long verbosity; + +/* /////////////////////////////////////////////////////////////// */ + +# define DEF_PRECISION_LINEAR 1E-8 +# define DEF_PRECISION_NONLINEAR 1E-14 + +double *optimize_qp(); +double *primal=0,*dual=0; +double init_margin=0.15; +long init_iter=500,precision_violations=0; +double model_b; +double opt_precision=DEF_PRECISION_LINEAR; + +/* /////////////////////////////////////////////////////////////// */ + +void *my_malloc(); + +double *optimize_qp(qp,epsilon_crit,nx,threshold,learn_parm) +QP *qp; +double *epsilon_crit; +long nx; /* Maximum number of variables in QP */ +double *threshold; +LEARN_PARM *learn_parm; +/* start the optimizer and return the optimal values */ +{ + register long i,j,result; + double margin,obj_before,obj_after; + double sigdig,dist,epsilon_loqo; + int iter; + + if(!primal) { /* allocate memory at first call */ + primal=(double *)my_malloc(sizeof(double)*nx*3); + dual=(double *)my_malloc(sizeof(double)*(nx*2+1)); + } + + if(verbosity>=4) { /* really verbose */ + printf("\n\n"); + for(i=0;iopt_n;i++) { + printf("%f: ",qp->opt_g0[i]); + for(j=0;jopt_n;j++) { + printf("%f ",qp->opt_g[i*qp->opt_n+j]); + } + printf(": a%ld=%.10f < %f",i,qp->opt_xinit[i],qp->opt_up[i]); + printf(": y=%f\n",qp->opt_ce[i]); + } + for(j=0;jopt_m;j++) { + printf("EQ-%ld: %f*a0",j,qp->opt_ce[j]); + for(i=1;iopt_n;i++) { + printf(" + %f*a%ld",qp->opt_ce[i],i); + } + printf(" = %f\n\n",-qp->opt_ce0[0]); + } +} + + obj_before=0; /* calculate objective before optimization */ + for(i=0;iopt_n;i++) { + obj_before+=(qp->opt_g0[i]*qp->opt_xinit[i]); + obj_before+=(0.5*qp->opt_xinit[i]*qp->opt_xinit[i]*qp->opt_g[i*qp->opt_n+i]); + for(j=0;jopt_xinit[j]*qp->opt_xinit[i]*qp->opt_g[j*qp->opt_n+i]); + } + } + + result=STILL_RUNNING; + qp->opt_ce0[0]*=(-1.0); + /* Run pr_loqo. If a run fails, try again with parameters which lead */ + /* to a slower, but more robust setting. */ + for(margin=init_margin,iter=init_iter; + (margin<=0.9999999) && (result!=OPTIMAL_SOLUTION);) { + sigdig=-log10(opt_precision); + + result=pr_loqo((int)qp->opt_n,(int)qp->opt_m, + (double *)qp->opt_g0,(double *)qp->opt_g, + (double *)qp->opt_ce,(double *)qp->opt_ce0, + (double *)qp->opt_low,(double *)qp->opt_up, + (double *)primal,(double *)dual, + (int)(verbosity-2), + (double)sigdig,(int)iter, + (double)margin,(double)(qp->opt_up[0])/4.0,(int)0); + + if(isnan(dual[0])) { /* check for choldc problem */ + if(verbosity>=2) { + printf("NOTICE: Restarting PR_LOQO with more conservative parameters.\n"); + } + if(init_margin<0.80) { /* become more conservative in general */ + init_margin=(4.0*margin+1.0)/5.0; + } + margin=(margin+1.0)/2.0; + (opt_precision)*=10.0; /* reduce precision */ + if(verbosity>=2) { + printf("NOTICE: Reducing precision of PR_LOQO.\n"); + } + } + else if(result!=OPTIMAL_SOLUTION) { + iter+=2000; + init_iter+=10; + (opt_precision)*=10.0; /* reduce precision */ + if(verbosity>=2) { + printf("NOTICE: Reducing precision of PR_LOQO due to (%ld).\n",result); + } + } + } + + if(qp->opt_m) /* Thanks to Alex Smola for this hint */ + model_b=dual[0]; + else + model_b=0; + + /* Check the precision of the alphas. If results of current optimization */ + /* violate KT-Conditions, relax the epsilon on the bounds on alphas. 
*/ + epsilon_loqo=1E-10; + for(i=0;iopt_n;i++) { + dist=-model_b*qp->opt_ce[i]; + dist+=(qp->opt_g0[i]+1.0); + for(j=0;jopt_g[j*qp->opt_n+i]); + } + for(j=i;jopt_n;j++) { + dist+=(primal[j]*qp->opt_g[i*qp->opt_n+j]); + } + /* printf("LOQO: a[%d]=%f, dist=%f, b=%f\n",i,primal[i],dist,dual[0]); */ + if((primal[i]<(qp->opt_up[i]-epsilon_loqo)) && (dist < (1.0-(*epsilon_crit)))) { + epsilon_loqo=(qp->opt_up[i]-primal[i])*2.0; + } + else if((primal[i]>(0+epsilon_loqo)) && (dist > (1.0+(*epsilon_crit)))) { + epsilon_loqo=primal[i]*2.0; + } + } + + for(i=0;iopt_n;i++) { /* clip alphas to bounds */ + if(primal[i]<=(0+epsilon_loqo)) { + primal[i]=0; + } + else if(primal[i]>=(qp->opt_up[i]-epsilon_loqo)) { + primal[i]=qp->opt_up[i]; + } + } + + obj_after=0; /* calculate objective after optimization */ + for(i=0;iopt_n;i++) { + obj_after+=(qp->opt_g0[i]*primal[i]); + obj_after+=(0.5*primal[i]*primal[i]*qp->opt_g[i*qp->opt_n+i]); + for(j=0;jopt_g[j*qp->opt_n+i]); + } + } + + /* if optimizer returned NAN values, reset and retry with smaller */ + /* working set. */ + if(isnan(obj_after) || isnan(model_b)) { + for(i=0;iopt_n;i++) { + primal[i]=qp->opt_xinit[i]; + } + model_b=0; + if(learn_parm->svm_maxqpsize>2) { + learn_parm->svm_maxqpsize--; /* decrease size of qp-subproblems */ + } + } + + if(obj_after >= obj_before) { /* check whether there was progress */ + (opt_precision)/=100.0; + precision_violations++; + if(verbosity>=2) { + printf("NOTICE: Increasing Precision of PR_LOQO.\n"); + } + } + + if(precision_violations > 500) { + (*epsilon_crit)*=10.0; + precision_violations=0; + if(verbosity>=1) { + printf("\nWARNING: Relaxing epsilon on KT-Conditions.\n"); + } + } + + (*threshold)=model_b; + + if(result!=OPTIMAL_SOLUTION) { + printf("\nERROR: PR_LOQO did not converge. \n"); + return(qp->opt_xinit); + } + else { + return(primal); + } +} + diff --git a/inst/include/utils/circular_buffer.h b/inst/include/utils/circular_buffer.h new file mode 100644 index 00000000..ee589533 --- /dev/null +++ b/inst/include/utils/circular_buffer.h @@ -0,0 +1,493 @@ +/****************************************************************************** + * $Id: $ + * $Name: $ + * + * Author: Pete Goodliffe + * + * --------------------------------------------------------- + * ------------------- + * Copyright 2002 Pete Goodliffe All rights reserved. + * + * ---------------------------------------------------------------------------- + * Purpose: STL-style circular buffer + * + * ---------------------------------------------------------------------------- + * History: See source control system log. + * + *****************************************************************************/ + +#ifndef CIRCULAR_BUFFER_H +#define CIRCULAR_BUFFER_H + +#include +#include +#include + +/****************************************************************************** + * Iterators + *****************************************************************************/ + +/** + * Iterator type for the circular_buffer class. + * + * This one template class provides all variants of forward/reverse + * const/non const iterators through plentiful template magic. + * + * You don't need to instantiate it directly, use the good public functions + * availble in circular_buffer. 
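+ *
+ * Illustrative sketch (an assumed typical use, not from the original
+ * docs): you normally obtain these iterators from the container itself:
+ * @code
+ * circular_buffer<int> cb(4);
+ * cb.push_back(1); cb.push_back(2); cb.push_back(3);
+ * for (circular_buffer<int>::iterator it = cb.begin(); it != cb.end(); ++it)
+ *     std::cout << *it << ' ';   // visits elements oldest-first: 1 2 3
+ * @endcode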
+ */ +template //+ const for const iter +class circular_buffer_iterator +{ + public: + + typedef circular_buffer_iterator self_type; + + typedef T cbuf_type; + typedef std::random_access_iterator_tag iterator_category; + typedef typename cbuf_type::value_type value_type; + typedef typename cbuf_type::size_type size_type; + typedef typename cbuf_type::pointer pointer; + typedef typename cbuf_type::const_pointer const_pointer; + typedef typename cbuf_type::reference reference; + typedef typename cbuf_type::const_reference const_reference; + typedef typename cbuf_type::difference_type difference_type; + + circular_buffer_iterator(cbuf_type *b, size_type p) + : buf_(b), pos_(p) {} + + // Converting a non-const iterator to a const iterator + circular_buffer_iterator + (const circular_buffer_iterator + &other) + : buf_(other.buf_), pos_(other.pos_) {} + friend class circular_buffer_iterator; + + // Use compiler generated copy ctor, copy assignment operator and dtor + + elem_type &operator*() { return (*buf_)[pos_]; } + elem_type *operator->() { return &(operator*()); } + + self_type &operator++() + { + pos_ += 1; + return *this; + } + self_type operator++(int) + { + self_type tmp(*this); + ++(*this); + return tmp; + } + + self_type &operator--() + { + pos_ -= 1; + return *this; + } + self_type operator--(int) + { + self_type tmp(*this); + --(*this); + return tmp; + } + + self_type operator+(difference_type n) const + { + self_type tmp(*this); + tmp.pos_ += n; + return tmp; + } + self_type &operator+=(difference_type n) + { + pos_ += n; + return *this; + } + + self_type operator-(difference_type n) const + { + self_type tmp(*this); + tmp.pos_ -= n; + return tmp; + } + self_type &operator-=(difference_type n) + { + pos_ -= n; + return *this; + } + + difference_type operator-(const self_type &c) const + { + return pos_ - c.pos_; + } + + bool operator==(const self_type &other) const + { + return pos_ == other.pos_ && buf_ == other.buf_; + } + bool operator!=(const self_type &other) const + { + return pos_ != other.pos_ && buf_ == other.buf_; + } + bool operator>(const self_type &other) const + { + return pos_ > other.pos_; + } + bool operator>=(const self_type &other) const + { + return pos_ >= other.pos_; + } + bool operator<(const self_type &other) const + { + return pos_ < other.pos_; + } + bool operator<=(const self_type &other) const + { + return pos_ <= other.pos_; + } + + private: + + cbuf_type *buf_; + size_type pos_; +}; + +template +circular_buffer_iterator_t operator+ + (const typename circular_buffer_iterator_t::difference_type &a, + const circular_buffer_iterator_t &b) +{ + return circular_buffer_iterator_t(a) + b; +} + +template +circular_buffer_iterator_t operator- + (const typename circular_buffer_iterator_t::difference_type &a, + const circular_buffer_iterator_t &b) +{ + return circular_buffer_iterator_t(a) - b; +} + + +/****************************************************************************** + * circular_buffer + *****************************************************************************/ + +/** + * This class provides a circular buffer in the STL style. + * + * You can add data to the end using the @ref push_back function, read data + * using @ref front() and remove data using @ref pop_front(). + * + * The class also provides random access through the @ref operator[]() + * function and its random access iterator. Subscripting the array with + * an invalid (out of range) index number leads to undefined results, both + * for reading and writing. 
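+ *
+ * A minimal usage sketch (illustrative; assumes the
+ * always_accept_data_when_full parameter is left at a default of true):
+ * @code
+ * circular_buffer<double> buf(3);   // capacity 3
+ * buf.push_back(1.0);
+ * buf.push_back(2.0);
+ * buf.push_back(3.0);               // buffer now full
+ * buf.push_back(4.0);               // oldest element (1.0) is overwritten
+ * double oldest = buf.front();      // 2.0
+ * buf.pop_front();                  // size() drops to 2
+ * @endcode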
+ * + * This class template accepts three template parameters: + *
  • T The type of object contained + *
  • always_accept_data_when_full Determines the behaviour of
+ * @ref push_back when the buffer is full.
+ * Set to true, new data is always added and the
+ * oldest ("front") element is thrown away.
+ * Set to false, the new data is not added.
+ * No error is returned, nor is an
+ * exception raised.
+ *
  • Alloc Allocator type to use (in line with other + * STL containers). + * + * @short STL style circule buffer + * @author Pete Goodliffe + * @version 1.00 + */ +template > +class circular_buffer +{ + public: + + enum + { + version_major = 1, + version_minor = 0 + }; + + // Typedefs + typedef circular_buffer + self_type; + + typedef Alloc allocator_type; + + typedef typename Alloc::value_type value_type; + typedef typename Alloc::pointer pointer; + typedef typename Alloc::const_pointer const_pointer; + typedef typename Alloc::reference reference; + typedef typename Alloc::const_reference const_reference; + + typedef typename Alloc::size_type size_type; + typedef typename Alloc::difference_type difference_type; + + typedef circular_buffer_iterator + + iterator; + typedef circular_buffer_iterator + + const_iterator; + typedef std::reverse_iterator reverse_iterator; + typedef std::reverse_iterator const_reverse_iterator; + + // Lifetime + enum { default_capacity = 100 }; + explicit circular_buffer(size_type capacity = default_capacity) + : array_(alloc_.allocate(capacity)), array_size_(capacity), + head_(1), tail_(0), contents_size_(0) + { + } + circular_buffer(const circular_buffer &other) + : array_(alloc_.allocate(other.array_size_)), + array_size_(other.array_size_), + head_(other.head_), tail_(other.tail_), + contents_size_(other.contents_size_) + { + try + { + assign_into(other.begin(), other.end()); + } + catch (...) + { + destroy_all_elements(); + alloc_.deallocate(array_, array_size_); + throw; + } + } + template + circular_buffer(InputIterator from, InputIterator to) + : array_(alloc_.allocate(1)), array_size_(1), + head_(1), tail_(0), contents_size_(0) + { + circular_buffer tmp; + tmp.assign_into_reserving(from, to); + swap(tmp); + } + ~circular_buffer() + { + destroy_all_elements(); + alloc_.deallocate(array_, array_size_); + } + circular_buffer &operator=(const self_type &other) + { + circular_buffer tmp(other); + swap(tmp); + return *this; + } + void swap(circular_buffer &other) + { + std::swap(array_, other.array_); + std::swap(array_size_, other.array_size_); + std::swap(head_, other.head_); + std::swap(tail_, other.tail_); + std::swap(contents_size_, other.contents_size_); + } + allocator_type get_allocator() const { return alloc_; } + + // Iterators + iterator begin() { return iterator(this, 0); } + iterator end() { return iterator(this, size()); } + + const_iterator begin() const { return const_iterator(this, 0); } + const_iterator end() const { return const_iterator(this, size()); } + + reverse_iterator rbegin() { return reverse_iterator(end()); } + reverse_iterator rend() { return reverse_iterator(begin()); } + + const_reverse_iterator rbegin() const + { + return const_reverse_iterator(end()); + } + const_reverse_iterator rend() const + { + return const_reverse_iterator(begin()); + } + + // Size + size_type size() const { return contents_size_; } + size_type capacity() const { return array_size_; } + bool empty() const { return !contents_size_; } + size_type max_size() const + { + return alloc_.max_size(); + } + void reserve(size_type new_size) + { + if (capacity() < new_size) + { + circular_buffer tmp(new_size); + tmp.assign_into(begin(), end()); + swap(tmp); + } + } + + // Accessing + reference front() {return array_[head_];} + reference back() {return array_[tail_];} + const_reference front() const {return array_[head_];} + const_reference back() const {return array_[tail_];} + + void push_back(const value_type &item) + { + size_type next = next_tail(); + if 
(contents_size_ == array_size_) + { + if (always_accept_data_when_full) + { + array_[next] = item; + increment_head(); + } + } + else + { + alloc_.construct(array_ + next, item); + } + increment_tail(); + } + void pop_front() + { + size_type destroy_pos = head_; + increment_head(); + alloc_.destroy(array_ + destroy_pos); + } + void clear() + { + for (size_type n = 0; n < contents_size_; ++n) + { + alloc_.destroy(array_ + index_to_subscript(n)); + } + head_ = 1; + tail_ = contents_size_ = 0; + } + + reference operator[](size_type n) {return at_unchecked(n);} + const_reference operator[](size_type n) const {return at_unchecked(n);} + + reference at(size_type n) {return at_checked(n);} + const_reference at(size_type n) const {return at_checked(n);} + + private: + + reference at_unchecked(size_type index) const + { + return array_[index_to_subscript(index)]; + } + + reference at_checked(size_type index) const + { + if (size() >= contents_size_) + { + throw std::exception(); + } + return at_unchecked(index); + } + + // Rounds an unbounded to an index into array_ + size_type normalise(size_type n) const { return n % array_size_; } + + // Converts external index to an array subscript + size_type index_to_subscript(size_type index) const + { + return normalise(index + head_); + } + + void increment_tail() + { + ++contents_size_; + tail_ = next_tail(); + } + + size_type next_tail() + { + return (tail_+1 == array_size_) ? 0 : tail_+1; + } + + void increment_head() + { + // precondition: !empty() + ++head_; + --contents_size_; + if (head_ == array_size_) head_ = 0; + } + + template + void assign_into(f_iter from, f_iter to) + { + if (contents_size_) clear(); + while (from != to) + { + push_back(*from); + ++from; + } + } + + template + void assign_into_reserving(f_iter from, f_iter to) + { + if (contents_size_) clear(); + while (from != to) + { + if (contents_size_ == array_size_) + { + reserve(static_cast(array_size_ * 1.5)); + } + push_back(*from); + ++from; + } + } + + void destroy_all_elements() + { + for (size_type n = 0; n < contents_size_; ++n) + { + alloc_.destroy(array_ + index_to_subscript(n)); + } + } + + allocator_type alloc_; + value_type *array_; + size_type array_size_; + size_type head_; + size_type tail_; + size_type contents_size_; +}; + +template +bool operator==(const circular_buffer &a, + const circular_buffer &b) +{ + return a.size() == b.size() && std::equal(a.begin(), a.end(), b.begin()); +} + +template +bool operator!=(const circular_buffer &a, + const circular_buffer &b) +{ + return a.size() != b.size() || !std::equal(a.begin(), a.end(), b.begin()); +} + +template +bool operator<(const circular_buffer &a, + const circular_buffer &b) +{ + return std::lexicographical_compare(a.begin(), a.end(), b.begin(), b.end()); +} + +#endif diff --git a/inst/include/utils/cutils.h b/inst/include/utils/cutils.h new file mode 100644 index 00000000..0a6436c4 --- /dev/null +++ b/inst/include/utils/cutils.h @@ -0,0 +1,22 @@ +/** + * @file: cutils.h + * @description: Separated utils (for C targets) + */ + +#ifndef CUTILS_H +#define CUTILS_H + +#ifdef RCPP_INTERFACE +// FIXME? +#define C_EXIT(x) +#define C_FFLUSH(stream) +#define C_FPRINTF(stream, ...) +#define C_PRINTF(...) +#else +#define C_EXIT(x) exit(x); +#define C_FFLUSH(stream) fflush(stream); +#define C_FPRINTF(stream, ...) fprintf(stream, __VA_ARGS__); +#define C_PRINTF(...) 
printf(__VA_ARGS__); +#endif + +#endif diff --git a/inst/include/utils/fast_mutex.h b/inst/include/utils/fast_mutex.h new file mode 100644 index 00000000..4d4b7cc4 --- /dev/null +++ b/inst/include/utils/fast_mutex.h @@ -0,0 +1,248 @@ +/* -*- mode: c++; tab-width: 2; indent-tabs-mode: nil; -*- +Copyright (c) 2010-2012 Marcus Geelnard + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source + distribution. +*/ + +#ifndef _FAST_MUTEX_H_ +#define _FAST_MUTEX_H_ + +/// @file + +// Which platform are we on? +#if !defined(_TTHREAD_PLATFORM_DEFINED_) + #if defined(_WIN32) || defined(__WIN32__) || defined(__WINDOWS__) + #define _TTHREAD_WIN32_ + #else + #define _TTHREAD_POSIX_ + #endif + #define _TTHREAD_PLATFORM_DEFINED_ +#endif + +// Check if we can support the assembly language level implementation (otherwise +// revert to the system API) +#if (defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))) || \ + (defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))) || \ + (defined(__GNUC__) && (defined(__ppc__))) + #define _FAST_MUTEX_ASM_ +#else + #define _FAST_MUTEX_SYS_ +#endif + +#if defined(_TTHREAD_WIN32_) + #ifndef WIN32_LEAN_AND_MEAN + #define WIN32_LEAN_AND_MEAN + #define __UNDEF_LEAN_AND_MEAN + #endif + #include + #ifdef __UNDEF_LEAN_AND_MEAN + #undef WIN32_LEAN_AND_MEAN + #undef __UNDEF_LEAN_AND_MEAN + #endif +#else + #ifdef _FAST_MUTEX_ASM_ + #include + #else + #include + #endif +#endif + +namespace tthread { + +/// Fast mutex class. +/// This is a mutual exclusion object for synchronizing access to shared +/// memory areas for several threads. It is similar to the tthread::mutex class, +/// but instead of using system level functions, it is implemented as an atomic +/// spin lock with very low CPU overhead. +/// +/// The \c fast_mutex class is NOT compatible with the \c condition_variable +/// class (however, it IS compatible with the \c lock_guard class). It should +/// also be noted that the \c fast_mutex class typically does not provide +/// as accurate thread scheduling as a the standard \c mutex class does. +/// +/// Because of the limitations of the class, it should only be used in +/// situations where the mutex needs to be locked/unlocked very frequently. +/// +/// @note The "fast" version of this class relies on inline assembler language, +/// which is currently only supported for 32/64-bit Intel x86/AMD64 and +/// PowerPC architectures on a limited number of compilers (GNU g++ and MS +/// Visual C++). +/// For other architectures/compilers, system functions are used instead. +class fast_mutex { + public: + /// Constructor. 
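+  /// Usage sketch (illustrative; lock_guard is defined in tinythread.h and,
+  /// as noted above, is compatible with fast_mutex):
+  /// @code
+  /// static tthread::fast_mutex gLock;
+  /// void touch_shared_state()
+  /// {
+  ///   tthread::lock_guard<tthread::fast_mutex> guard(gLock);
+  ///   // ... shared data may be accessed safely here ...
+  /// }
+  /// @endcode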
+#if defined(_FAST_MUTEX_ASM_) + fast_mutex() : mLock(0) {} +#else + fast_mutex() + { + #if defined(_TTHREAD_WIN32_) + InitializeCriticalSection(&mHandle); + #elif defined(_TTHREAD_POSIX_) + pthread_mutex_init(&mHandle, NULL); + #endif + } +#endif + +#if !defined(_FAST_MUTEX_ASM_) + /// Destructor. + ~fast_mutex() + { + #if defined(_TTHREAD_WIN32_) + DeleteCriticalSection(&mHandle); + #elif defined(_TTHREAD_POSIX_) + pthread_mutex_destroy(&mHandle); + #endif + } +#endif + + /// Lock the mutex. + /// The method will block the calling thread until a lock on the mutex can + /// be obtained. The mutex remains locked until \c unlock() is called. + /// @see lock_guard + inline void lock() + { +#if defined(_FAST_MUTEX_ASM_) + bool gotLock; + do { + gotLock = try_lock(); + if(!gotLock) + { + #if defined(_TTHREAD_WIN32_) + Sleep(0); + #elif defined(_TTHREAD_POSIX_) + sched_yield(); + #endif + } + } while(!gotLock); +#else + #if defined(_TTHREAD_WIN32_) + EnterCriticalSection(&mHandle); + #elif defined(_TTHREAD_POSIX_) + pthread_mutex_lock(&mHandle); + #endif +#endif + } + + /// Try to lock the mutex. + /// The method will try to lock the mutex. If it fails, the function will + /// return immediately (non-blocking). + /// @return \c true if the lock was acquired, or \c false if the lock could + /// not be acquired. + inline bool try_lock() + { +#if defined(_FAST_MUTEX_ASM_) + int oldLock; + #if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) + asm volatile ( + "movl $1,%%eax\n\t" + "xchg %%eax,%0\n\t" + "movl %%eax,%1\n\t" + : "=m" (mLock), "=m" (oldLock) + : + : "%eax", "memory" + ); + #elif defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64)) + int *ptrLock = &mLock; + __asm { + mov eax,1 + mov ecx,ptrLock + xchg eax,[ecx] + mov oldLock,eax + } + #elif defined(__GNUC__) && (defined(__ppc__)) + int newLock = 1; + asm volatile ( + "\n1:\n\t" + "lwarx %0,0,%1\n\t" + "cmpwi 0,%0,0\n\t" + "bne- 2f\n\t" + "stwcx. %2,0,%1\n\t" + "bne- 1b\n\t" + "isync\n" + "2:\n\t" + : "=&r" (oldLock) + : "r" (&mLock), "r" (newLock) + : "cr0", "memory" + ); + #endif + return (oldLock == 0); +#else + #if defined(_TTHREAD_WIN32_) + return TryEnterCriticalSection(&mHandle) ? true : false; + #elif defined(_TTHREAD_POSIX_) + return (pthread_mutex_trylock(&mHandle) == 0) ? true : false; + #endif +#endif + } + + /// Unlock the mutex. + /// If any threads are waiting for the lock on this mutex, one of them will + /// be unblocked. + inline void unlock() + { +#if defined(_FAST_MUTEX_ASM_) + #if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) + asm volatile ( + "movl $0,%%eax\n\t" + "xchg %%eax,%0\n\t" + : "=m" (mLock) + : + : "%eax", "memory" + ); + #elif defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64)) + int *ptrLock = &mLock; + __asm { + mov eax,0 + mov ecx,ptrLock + xchg eax,[ecx] + } + #elif defined(__GNUC__) && (defined(__ppc__)) + asm volatile ( + "sync\n\t" // Replace with lwsync where possible? 
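+      // (sync acts as a full memory barrier here: it orders the critical
+      //  section's accesses before the mLock = 0 store that releases the lock.)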
+ : : : "memory" + ); + mLock = 0; + #endif +#else + #if defined(_TTHREAD_WIN32_) + LeaveCriticalSection(&mHandle); + #elif defined(_TTHREAD_POSIX_) + pthread_mutex_unlock(&mHandle); + #endif +#endif + } + + private: +#if defined(_FAST_MUTEX_ASM_) + int mLock; +#else + #if defined(_TTHREAD_WIN32_) + CRITICAL_SECTION mHandle; + #elif defined(_TTHREAD_POSIX_) + pthread_mutex_t mHandle; + #endif +#endif +}; + +} + +#endif // _FAST_MUTEX_H_ + diff --git a/inst/include/utils/logger.h b/inst/include/utils/logger.h new file mode 100644 index 00000000..b691d4da --- /dev/null +++ b/inst/include/utils/logger.h @@ -0,0 +1,73 @@ +#ifndef LOGGER_H +#define LOGGER_H + +#include +#include +#include +#include "threading.h" + +#ifdef RCPP_INTERFACE + #include + // TODO: remove it + using namespace Rcpp; + #define COUT(x) Rcpp::Rcout<<(x)< + // TODO: remove it + using namespace arma; + #define COUT(x) std::cout<<(x)<log(level, text); + #ifdef DEBUG_GMUM + #define DBG(logger, level, text) logger.log(level, text); + #define DBG_PTR(logger, level, text) logger->log(level, text); + #else + #define DBG(logger, level, text) + #define DBG_PTR(logger, level, text) +#endif + + +class LogLevel { +public: + static const int NO_LOGGING_LEVEL = 0; + static const int FATAL_LEVEL = 1; + static const int ERR_LEVEL = 2; + static const int WARNING_LEVEL = 3; + static const int INFO_LEVEL = 4; + static const int DEBUG_LEVEL = 5; + static const int TRACE_LEVEL = 6; +}; + +class Logger { +public: + std::ostream * out; // Pointer to support copy/assignment constructor + int verbosity; + // This mutex exists only because we cannot print to cout in R in separate thread + // And there will be master thread printing logs + gmum::fast_mutex mutex; + + // TODO: It is bad design to add that many macros. We should separate this logic during construction +#ifdef RCPP_INTERFACE + Logger(int level=LogLevel::INFO_LEVEL, + std::ostream & out = Rcpp::Rcout): out(&out), verbosity(level) { + } +#else + Logger(int level=LogLevel::INFO_LEVEL, + std::ostream & out = std::cout): out(&out), verbosity(level) { + } +#endif + + template + void log(int level, T msg) { + if (level <= verbosity) { + gmum::scoped_lock scoped_lock(mutex); + (*out)< + class scoped_lock{ + Mutex * m_mutex; + public: + scoped_lock(Mutex & mutex): m_mutex(&mutex){ + m_mutex->lock(); + } + + scoped_lock(Mutex * mutex): m_mutex(mutex){ + m_mutex->lock(); + } + ~scoped_lock(){ + m_mutex->unlock(); + } + }; + + typedef tthread::thread gmum_thread ; + typedef tthread::condition_variable gmum_condition; + ///sleep current thread (equivalent to boost::this_thread::sleep) + void sleep(int ms); +} + +#endif diff --git a/inst/include/utils/tinythread.h b/inst/include/utils/tinythread.h new file mode 100644 index 00000000..14afc30f --- /dev/null +++ b/inst/include/utils/tinythread.h @@ -0,0 +1,696 @@ +/* -*- mode: c++; tab-width: 2; indent-tabs-mode: nil; -*- +Copyright (c) 2010-2012 Marcus Geelnard + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. 
If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source + distribution. +*/ + +#ifndef _TINYTHREAD_H_ +#define _TINYTHREAD_H_ + +/// @file +/// @mainpage TinyThread++ API Reference +/// +/// @section intro_sec Introduction +/// TinyThread++ is a minimal, portable implementation of basic threading +/// classes for C++. +/// +/// They closely mimic the functionality and naming of the C++11 standard, and +/// should be easily replaceable with the corresponding std:: variants. +/// +/// @section port_sec Portability +/// The Win32 variant uses the native Win32 API for implementing the thread +/// classes, while for other systems, the POSIX threads API (pthread) is used. +/// +/// @section class_sec Classes +/// In order to mimic the threading API of the C++11 standard, subsets of +/// several classes are provided. The fundamental classes are: +/// @li tthread::thread +/// @li tthread::mutex +/// @li tthread::recursive_mutex +/// @li tthread::condition_variable +/// @li tthread::lock_guard +/// @li tthread::fast_mutex +/// +/// @section misc_sec Miscellaneous +/// The following special keywords are available: #thread_local. +/// +/// For more detailed information (including additional classes), browse the +/// different sections of this documentation. A good place to start is: +/// tinythread.h. + + #define _TTHREAD_POSIX_ + #define _TTHREAD_PLATFORM_DEFINED_ + + #include + #include + #include + #include + +// Generic includes +#include + +/// TinyThread++ version (major number). +#define TINYTHREAD_VERSION_MAJOR 1 +/// TinyThread++ version (minor number). +#define TINYTHREAD_VERSION_MINOR 1 +/// TinyThread++ version (full version). +#define TINYTHREAD_VERSION (TINYTHREAD_VERSION_MAJOR * 100 + TINYTHREAD_VERSION_MINOR) + +// Do we have a fully featured C++11 compiler? +#if (__cplusplus > 199711L) || (defined(__STDCXX_VERSION__) && (__STDCXX_VERSION__ >= 201001L)) + #define _TTHREAD_CPP11_ +#endif + +// ...at least partial C++11? +#if defined(_TTHREAD_CPP11_) || defined(__GXX_EXPERIMENTAL_CXX0X__) || defined(__GXX_EXPERIMENTAL_CPP0X__) + #define _TTHREAD_CPP11_PARTIAL_ +#endif + +// Macro for disabling assignments of objects. +#ifdef _TTHREAD_CPP11_PARTIAL_ + #define _TTHREAD_DISABLE_ASSIGNMENT(name) \ + name(const name&) = delete; \ + name& operator=(const name&) = delete; +#else + #define _TTHREAD_DISABLE_ASSIGNMENT(name) \ + name(const name&); \ + name& operator=(const name&); +#endif + +/// @def thread_local +/// Thread local storage keyword. +/// A variable that is declared with the @c thread_local keyword makes the +/// value of the variable local to each thread (known as thread-local storage, +/// or TLS). Example usage: +/// @code +/// // This variable is local to each thread. +/// thread_local int variable; +/// @endcode +/// @note The @c thread_local keyword is a macro that maps to the corresponding +/// compiler directive (e.g. @c __declspec(thread)). While the C++11 standard +/// allows for non-trivial types (e.g. classes with constructors and +/// destructors) to be declared with the @c thread_local keyword, most pre-C++11 +/// compilers only allow for trivial types (e.g. @c int). So, to guarantee +/// portable code, only use trivial types for thread local storage. 
+/// @note This directive is currently not supported on Mac OS X (it will give +/// a compiler error), since compile-time TLS is not supported in the Mac OS X +/// executable format. Also, some older versions of MinGW (before GCC 4.x) do +/// not support this directive. +/// @hideinitializer + +#if !defined(_TTHREAD_CPP11_) && !defined(thread_local) + #if defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_CC) || defined(__IBMCPP__) + #define thread_local __thread + #else + #define thread_local __declspec(thread) + #endif +#endif + + +/// Main name space for TinyThread++. +/// This namespace is more or less equivalent to the @c std namespace for the +/// C++11 thread classes. For instance, the tthread::mutex class corresponds to +/// the std::mutex class. +namespace tthread { + +/// Mutex class. +/// This is a mutual exclusion object for synchronizing access to shared +/// memory areas for several threads. The mutex is non-recursive (i.e. a +/// program may deadlock if the thread that owns a mutex object calls lock() +/// on that object). +/// @see recursive_mutex +class mutex { + public: + /// Constructor. + mutex() +#if defined(_TTHREAD_WIN32_) + : mAlreadyLocked(false) +#endif + { +#if defined(_TTHREAD_WIN32_) + InitializeCriticalSection(&mHandle); +#else + pthread_mutex_init(&mHandle, NULL); +#endif + } + + /// Destructor. + ~mutex() + { +#if defined(_TTHREAD_WIN32_) + DeleteCriticalSection(&mHandle); +#else + pthread_mutex_destroy(&mHandle); +#endif + } + + /// Lock the mutex. + /// The method will block the calling thread until a lock on the mutex can + /// be obtained. The mutex remains locked until @c unlock() is called. + /// @see lock_guard + inline void lock() + { +#if defined(_TTHREAD_WIN32_) + EnterCriticalSection(&mHandle); + while(mAlreadyLocked) Sleep(1000); // Simulate deadlock... + mAlreadyLocked = true; +#else + pthread_mutex_lock(&mHandle); +#endif + } + + /// Try to lock the mutex. + /// The method will try to lock the mutex. If it fails, the function will + /// return immediately (non-blocking). + /// @return @c true if the lock was acquired, or @c false if the lock could + /// not be acquired. + inline bool try_lock() + { +#if defined(_TTHREAD_WIN32_) + bool ret = (TryEnterCriticalSection(&mHandle) ? true : false); + if(ret && mAlreadyLocked) + { + LeaveCriticalSection(&mHandle); + ret = false; + } + return ret; +#else + return (pthread_mutex_trylock(&mHandle) == 0) ? true : false; +#endif + } + + /// Unlock the mutex. + /// If any threads are waiting for the lock on this mutex, one of them will + /// be unblocked. + inline void unlock() + { +#if defined(_TTHREAD_WIN32_) + mAlreadyLocked = false; + LeaveCriticalSection(&mHandle); +#else + pthread_mutex_unlock(&mHandle); +#endif + } + + _TTHREAD_DISABLE_ASSIGNMENT(mutex) + + private: +#if defined(_TTHREAD_WIN32_) + CRITICAL_SECTION mHandle; + bool mAlreadyLocked; +#else + pthread_mutex_t mHandle; +#endif + + friend class condition_variable; +}; + +/// Recursive mutex class. +/// This is a mutual exclusion object for synchronizing access to shared +/// memory areas for several threads. The mutex is recursive (i.e. a thread +/// may lock the mutex several times, as long as it unlocks the mutex the same +/// number of times). +/// @see mutex +class recursive_mutex { + public: + /// Constructor. 
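+  /// Recursion sketch (illustrative): the same thread may nest lock() calls
+  /// as long as each one is matched by an unlock():
+  /// @code
+  /// tthread::recursive_mutex rm;
+  /// rm.lock();
+  /// rm.lock();    // OK: re-entered by the owning thread
+  /// rm.unlock();
+  /// rm.unlock();  // released only after the matching unlock
+  /// @endcode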
+ recursive_mutex() + { +#if defined(_TTHREAD_WIN32_) + InitializeCriticalSection(&mHandle); +#else + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&mHandle, &attr); +#endif + } + + /// Destructor. + ~recursive_mutex() + { +#if defined(_TTHREAD_WIN32_) + DeleteCriticalSection(&mHandle); +#else + pthread_mutex_destroy(&mHandle); +#endif + } + + /// Lock the mutex. + /// The method will block the calling thread until a lock on the mutex can + /// be obtained. The mutex remains locked until @c unlock() is called. + /// @see lock_guard + inline void lock() + { +#if defined(_TTHREAD_WIN32_) + EnterCriticalSection(&mHandle); +#else + pthread_mutex_lock(&mHandle); +#endif + } + + /// Try to lock the mutex. + /// The method will try to lock the mutex. If it fails, the function will + /// return immediately (non-blocking). + /// @return @c true if the lock was acquired, or @c false if the lock could + /// not be acquired. + inline bool try_lock() + { +#if defined(_TTHREAD_WIN32_) + return TryEnterCriticalSection(&mHandle) ? true : false; +#else + return (pthread_mutex_trylock(&mHandle) == 0) ? true : false; +#endif + } + + /// Unlock the mutex. + /// If any threads are waiting for the lock on this mutex, one of them will + /// be unblocked. + inline void unlock() + { +#if defined(_TTHREAD_WIN32_) + LeaveCriticalSection(&mHandle); +#else + pthread_mutex_unlock(&mHandle); +#endif + } + + _TTHREAD_DISABLE_ASSIGNMENT(recursive_mutex) + + private: +#if defined(_TTHREAD_WIN32_) + CRITICAL_SECTION mHandle; +#else + pthread_mutex_t mHandle; +#endif + + friend class condition_variable; +}; + +/// Lock guard class. +/// The constructor locks the mutex, and the destructor unlocks the mutex, so +/// the mutex will automatically be unlocked when the lock guard goes out of +/// scope. Example usage: +/// @code +/// mutex m; +/// int counter; +/// +/// void increment() +/// { +/// lock_guard guard(m); +/// ++ counter; +/// } +/// @endcode + +template +class lock_guard { + public: + typedef T mutex_type; + + lock_guard() : mMutex(0) {} + + /// The constructor locks the mutex. + explicit lock_guard(mutex_type &aMutex) + { + mMutex = &aMutex; + mMutex->lock(); + } + + /// The destructor unlocks the mutex. + ~lock_guard() + { + if(mMutex) + mMutex->unlock(); + } + + private: + mutex_type * mMutex; +}; + +/// Condition variable class. +/// This is a signalling object for synchronizing the execution flow for +/// several threads. Example usage: +/// @code +/// // Shared data and associated mutex and condition variable objects +/// int count; +/// mutex m; +/// condition_variable cond; +/// +/// // Wait for the counter to reach a certain number +/// void wait_counter(int targetCount) +/// { +/// lock_guard guard(m); +/// while(count < targetCount) +/// cond.wait(m); +/// } +/// +/// // Increment the counter, and notify waiting threads +/// void increment() +/// { +/// lock_guard guard(m); +/// ++ count; +/// cond.notify_all(); +/// } +/// @endcode +class condition_variable { + public: + /// Constructor. +#if defined(_TTHREAD_WIN32_) + condition_variable(); +#else + condition_variable() + { + pthread_cond_init(&mHandle, NULL); + } +#endif + + /// Destructor. +#if defined(_TTHREAD_WIN32_) + ~condition_variable(); +#else + ~condition_variable() + { + pthread_cond_destroy(&mHandle); + } +#endif + + /// Wait for the condition. 
+ /// The function will block the calling thread until the condition variable + /// is woken by @c notify_one(), @c notify_all() or a spurious wake up. + /// @param[in] aMutex A mutex that will be unlocked when the wait operation + /// starts, an locked again as soon as the wait operation is finished. + template + inline void wait(_mutexT &aMutex) + { +#if defined(_TTHREAD_WIN32_) + // Increment number of waiters + EnterCriticalSection(&mWaitersCountLock); + ++ mWaitersCount; + LeaveCriticalSection(&mWaitersCountLock); + + // Release the mutex while waiting for the condition (will decrease + // the number of waiters when done)... + aMutex.unlock(); + _wait(); + aMutex.lock(); +#else + pthread_cond_wait(&mHandle, &aMutex.mHandle); +#endif + } + + /// Notify one thread that is waiting for the condition. + /// If at least one thread is blocked waiting for this condition variable, + /// one will be woken up. + /// @note Only threads that started waiting prior to this call will be + /// woken up. +#if defined(_TTHREAD_WIN32_) + void notify_one(); +#else + inline void notify_one() + { + pthread_cond_signal(&mHandle); + } +#endif + + /// Notify all threads that are waiting for the condition. + /// All threads that are blocked waiting for this condition variable will + /// be woken up. + /// @note Only threads that started waiting prior to this call will be + /// woken up. +#if defined(_TTHREAD_WIN32_) + void notify_all(); +#else + inline void notify_all() + { + pthread_cond_broadcast(&mHandle); + } +#endif + + _TTHREAD_DISABLE_ASSIGNMENT(condition_variable) + + private: +#if defined(_TTHREAD_WIN32_) + void _wait(); + HANDLE mEvents[2]; ///< Signal and broadcast event HANDLEs. + unsigned int mWaitersCount; ///< Count of the number of waiters. + CRITICAL_SECTION mWaitersCountLock; ///< Serialize access to mWaitersCount. +#else + pthread_cond_t mHandle; +#endif +}; + + +/// Thread class. +class thread { + public: +#if defined(_TTHREAD_WIN32_) + typedef HANDLE native_handle_type; +#else + typedef pthread_t native_handle_type; +#endif + + class id; + + /// Default constructor. + /// Construct a @c thread object without an associated thread of execution + /// (i.e. non-joinable). + thread() : mHandle(0), mNotAThread(true) +#if defined(_TTHREAD_WIN32_) + , mWin32ThreadID(0) +#endif + {} + + /// Thread starting constructor. + /// Construct a @c thread object with a new thread of execution. + /// @param[in] aFunction A function pointer to a function of type: + /// void fun(void * arg) + /// @param[in] aArg Argument to the thread function. + /// @note This constructor is not fully compatible with the standard C++ + /// thread class. It is more similar to the pthread_create() (POSIX) and + /// CreateThread() (Windows) functions. + thread(void (*aFunction)(void *), void * aArg); + + /// Destructor. + /// @note If the thread is joinable upon destruction, @c std::terminate() + /// will be called, which terminates the process. It is always wise to do + /// @c join() before deleting a thread object. + ~thread(); + + /// Wait for the thread to finish (join execution flows). + /// After calling @c join(), the thread object is no longer associated with + /// a thread of execution (i.e. it is not joinable, and you may not join + /// with it nor detach from it). + void join(); + + /// Check if the thread is joinable. + /// A thread object is joinable if it has an associated thread of execution. + bool joinable() const; + + /// Detach from the thread. 
+ /// After calling @c detach(), the thread object is no longer assicated with + /// a thread of execution (i.e. it is not joinable). The thread continues + /// execution without the calling thread blocking, and when the thread + /// ends execution, any owned resources are released. + void detach(); + + /// Return the thread ID of a thread object. + id get_id() const; + + /// Get the native handle for this thread. + /// @note Under Windows, this is a @c HANDLE, and under POSIX systems, this + /// is a @c pthread_t. + inline native_handle_type native_handle() + { + return mHandle; + } + + /// Determine the number of threads which can possibly execute concurrently. + /// This function is useful for determining the optimal number of threads to + /// use for a task. + /// @return The number of hardware thread contexts in the system. + /// @note If this value is not defined, the function returns zero (0). + static unsigned hardware_concurrency(); + + _TTHREAD_DISABLE_ASSIGNMENT(thread) + + private: + native_handle_type mHandle; ///< Thread handle. + mutable mutex mDataMutex; ///< Serializer for access to the thread private data. + bool mNotAThread; ///< True if this object is not a thread of execution. +#if defined(_TTHREAD_WIN32_) + unsigned int mWin32ThreadID; ///< Unique thread ID (filled out by _beginthreadex). +#endif + + // This is the internal thread wrapper function. +#if defined(_TTHREAD_WIN32_) + static unsigned WINAPI wrapper_function(void * aArg); +#else + static void * wrapper_function(void * aArg); +#endif +}; + +/// Thread ID. +/// The thread ID is a unique identifier for each thread. +/// @see thread::get_id() +class thread::id { + public: + /// Default constructor. + /// The default constructed ID is that of thread without a thread of + /// execution. + id() : mId(0) {}; + + id(unsigned long int aId) : mId(aId) {}; + + id(const id& aId) : mId(aId.mId) {}; + + inline id & operator=(const id &aId) + { + mId = aId.mId; + return *this; + } + + inline friend bool operator==(const id &aId1, const id &aId2) + { + return (aId1.mId == aId2.mId); + } + + inline friend bool operator!=(const id &aId1, const id &aId2) + { + return (aId1.mId != aId2.mId); + } + + inline friend bool operator<=(const id &aId1, const id &aId2) + { + return (aId1.mId <= aId2.mId); + } + + inline friend bool operator<(const id &aId1, const id &aId2) + { + return (aId1.mId < aId2.mId); + } + + inline friend bool operator>=(const id &aId1, const id &aId2) + { + return (aId1.mId >= aId2.mId); + } + + inline friend bool operator>(const id &aId1, const id &aId2) + { + return (aId1.mId > aId2.mId); + } + + inline friend std::ostream& operator <<(std::ostream &os, const id &obj) + { + os << obj.mId; + return os; + } + + private: + unsigned long int mId; +}; + + +// Related to - minimal to be able to support chrono. +typedef long long __intmax_t; + +/// Minimal implementation of the @c ratio class. This class provides enough +/// functionality to implement some basic @c chrono classes. +template <__intmax_t N, __intmax_t D = 1> class ratio { + public: + static double _as_double() { return double(N) / double(D); } +}; + +/// Minimal implementation of the @c chrono namespace. +/// The @c chrono namespace provides types for specifying time intervals. +namespace chrono { + /// Duration template class. This class provides enough functionality to + /// implement @c this_thread::sleep_for(). 
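+  /// For example (sketch): chrono::milliseconds is
+  /// duration<__intmax_t, ratio<1, 1000> >, so
+  /// @code
+  /// chrono::milliseconds d(250);
+  /// long long ticks = d.count();   // == 250
+  /// @endcode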
+ template > class duration { + private: + _Rep rep_; + public: + typedef _Rep rep; + typedef _Period period; + + /// Construct a duration object with the given duration. + template + explicit duration(const _Rep2& r) : rep_(r) { + + } + + /// Return the value of the duration object. + rep count() const + { + return rep_; + } + }; + + // Standard duration types. + typedef duration<__intmax_t, ratio<1, 1000000000> > nanoseconds; ///< Duration with the unit nanoseconds. + typedef duration<__intmax_t, ratio<1, 1000000> > microseconds; ///< Duration with the unit microseconds. + typedef duration<__intmax_t, ratio<1, 1000> > milliseconds; ///< Duration with the unit milliseconds. + typedef duration<__intmax_t> seconds; ///< Duration with the unit seconds. + typedef duration<__intmax_t, ratio<60> > minutes; ///< Duration with the unit minutes. + typedef duration<__intmax_t, ratio<3600> > hours; ///< Duration with the unit hours. +} + +/// The namespace @c this_thread provides methods for dealing with the +/// calling thread. +namespace this_thread { + /// Return the thread ID of the calling thread. + thread::id get_id(); + + /// Yield execution to another thread. + /// Offers the operating system the opportunity to schedule another thread + /// that is ready to run on the current processor. + inline void yield() + { +#if defined(_TTHREAD_WIN32_) + Sleep(0); +#else + sched_yield(); +#endif + } + + /// Blocks the calling thread for a period of time. + /// @param[in] aTime Minimum time to put the thread to sleep. + /// Example usage: + /// @code + /// // Sleep for 100 milliseconds + /// this_thread::sleep_for(chrono::milliseconds(100)); + /// @endcode + /// @note Supported duration types are: nanoseconds, microseconds, + /// milliseconds, seconds, minutes and hours. 
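+  /// (Conversion sketch: on POSIX the call below evaluates
+  ///  usleep(count * 1000000 * _Period::_as_double()), so for example
+  ///  sleep_for(chrono::seconds(2)) becomes usleep(2000000).)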
+ template void sleep_for(const chrono::duration<_Rep, _Period>& aTime) + { +#if defined(_TTHREAD_WIN32_) + Sleep(int(double(aTime.count()) * (1000.0 * _Period::_as_double()) + 0.5)); +#else + usleep(int(double(aTime.count()) * (1000000.0 * _Period::_as_double()) + 0.5)); +#endif + } +} + +} + +// Define/macro cleanup +#undef _TTHREAD_DISABLE_ASSIGNMENT + +#endif // _TINYTHREAD_H_ diff --git a/inst/include/utils/utils.h b/inst/include/utils/utils.h new file mode 100644 index 00000000..614fff2b --- /dev/null +++ b/inst/include/utils/utils.h @@ -0,0 +1,193 @@ +/* + * File: Utils.h + * Author: staszek + * + * Created on 10 sierpieĹ„ 2012, 16:53 + */ + +#ifndef UTILS_H +#define UTILS_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "boost/foreach.hpp" +#include +#include +#include +using namespace std; +typedef vector VI; +#define FOR(x, b, e) for(size_t x=b; x<=(e); ++x) +#define FORD(x, b, e) for(size_t x=b; x>=(e); ––x) +#define REP(x, n) for(size_t x=0; x<(n); ++x) +#define VAR(v,n) typeof(n) v=(n) +#define SIZE(x) (int)(x).size() +#define FOREACH(i,c) BOOST_FOREACH(i, c) //for(VAR(i,(c).begin());i!=(c).end();++i) + +#ifdef RCPP_INTERFACE +#include +#endif + +#ifdef DEBUG_GMUM +#ifdef RCPP_INTERFACE +#include +using namespace Rcpp; +#define DBG(logger, level, text) logger.log(level, text); +#define REPORT(x) Rcpp::Rcout<<#x<<"="<<(x)< +std::string to_str(const T& x) { + stringstream ss; + ss << x; + return ss.str(); +} + +struct BasicException: public std::exception { + std::string s; + BasicException(std::string ss) : + s(ss) { + } + ~BasicException() throw () { + } // Updated + const char* what() const throw () { + return s.c_str(); + } +}; + +//conflicting with boost namespace +namespace gmum { +template +std::string to_string(const T& x) { + stringstream ss; + ss << x; + return ss.str(); +} +} + +const int __one__ = 1; +const bool isCpuLittleEndian = 1 == *(char*) (&__one__); // CPU endianness +const bool isFileLittleEndian = false; // output endianness - you choose :) + +// ED - environment dependent utils + +int ed_c_rand(); +void ed_c_srand(unsigned int); + +static int __seed(int seed) { + ed_c_srand(seed); + return 0; +} + +template +void write_array(T* begin, T*end) { + for (; begin != end; ++begin) { + std::cerr << *begin << ","; + } + std::cerr << endl; +} + + +#define RANDOM_INT(rng, min, max) (rng() % (max - min +1) + min) + +static int __rnd(int min, int max) { + return (ed_c_rand() % (max - min + 1) + min); +} + +static int __int_rnd(int min, int max) { + return (ed_c_rand() % (max - min + 1) + min); +} + +static double __double_rnd(double min, double max) { + return min + (max - min) * ((double) ed_c_rand()) / RAND_MAX; +} + +static void _write_bin(ostream & out, double v) { + if (isCpuLittleEndian ^ isFileLittleEndian) { + // Switch between the two + char data[8], *pDouble = (char*) (double*) (&v); + for (int i = 0; i < 8; ++i) { + data[i] = pDouble[7 - i]; + } + out.write(data, 8); + } else + out.write((char*) (&v), 8); +} + +static inline void _write_bin_vect(ostream & out, vector & v) { + _write_bin(out, (double) v.size()); + // TODO: remove + REPORT(v.size()); + for (size_t i = 0; i < v.size(); ++i) { + _write_bin(out, v[i]); + } +} + +static inline double _load_bin(istream & in) { + char data[8]; + double res; + in.read(data, 8); + if (isCpuLittleEndian ^ isFileLittleEndian) { + char data_load[8]; + // Switch between the two + for (int i = 0; i < 8; ++i) { + data_load[i] = data[7 - i]; + } + 
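+            // byte order now reversed into data_load; the memcpy below
+            // reinterprets those 8 bytes as a double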
memcpy((char*) &res, &data_load[0], 8); + } else + memcpy((char*) &res, &data[0], 8); + + return res; +} + +static inline vector _load_bin_vector(istream & in) { + int N = (int) _load_bin(in); + vector x; + x.reserve(N); + REPORT(N); + for (int i = 0; i < N; ++i) { + x.push_back(_load_bin(in)); + } + return x; +} + +int check_argc(const char*); +int check_argc(const std::string); +char** to_argv(const char*); +char** to_argv(const std::string); +char** free_argv(int argc, char** argv); + +/// C rand() implemented in RCpp +int rcpp_c_rand(); + +#endif /* UTILS_H */ \ No newline at end of file diff --git a/inst/tests/test_hello_gmum.r b/inst/tests/test_hello_gmum.r deleted file mode 100644 index ab90b423..00000000 --- a/inst/tests/test_hello_gmum.r +++ /dev/null @@ -1,7 +0,0 @@ -library('testthat') -library('gmum.r') - -test_that('Hello gmum working', { - expect_that(hello_gmum(), equals(c('hello', 'gmum'))) - -}) \ No newline at end of file diff --git a/libs/README.md b/libs/README.md new file mode 100644 index 00000000..1b535d20 --- /dev/null +++ b/libs/README.md @@ -0,0 +1 @@ +(devnote) - put gmum.r.dll in x64 and i386 for binary package. These can be produced by ``devtools::install(".", args="--arch=i386/x64")`` assuming Windows is configured for compiling Rcpp package. \ No newline at end of file diff --git a/man/CEC.Rd b/man/CEC.Rd new file mode 100644 index 00000000..1e34e594 --- /dev/null +++ b/man/CEC.Rd @@ -0,0 +1,88 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/cec.R +\name{CEC} +\alias{CEC} +\title{Cross-Entropy Clustering} +\usage{ +CEC(x = NULL, k = 0, method.type = "standard", method.init = "kmeans++", + params.r = 0, params.cov = matrix(0), params.centroids = NULL, + params.mix = NULL, params.function = NULL, control.nstart = 10, + control.eps = 0.05, control.itmax = 25, log.energy = TRUE, + log.ncluster = TRUE, seed = NULL) +} +\arguments{ +\item{x}{Numeric matrix of data.} + +\item{k}{Initial number of clusters.} + +\item{method.type}{Type of clustering (Gaussian family). +\enumerate{ +\item 'diagonal' Gaussians with diagonal covariance. The clustering will try to divide the data into ellipsoid with radiuses parallel to coordinate axes +\item 'fixed_spherical' Spherical (radial) Gaussian densities (additional parameter - radius) +\item 'fixed_covariance' The clustering will have the tendency to divide the data into clusters resembling the unit circles in the Mahalanobis distance (additional parameter - covaraince matrix required) +\item 'func' Own function dependent on m and sigma (additional parameter) +\item 'standard' We divide dataset into ellipsoid-like clusters without any preferences (default) +\item 'spherical' The clustering will try to divide the data into circles of arbitrary sizes}} + +\item{method.init}{Method to initialize clusters. 
+\enumerate{ +\item 'centroids' +\item 'kmeans++' +\item 'random'}} + +\item{params.r}{Radius for spherical family.} + +\item{params.cov}{Covariance matrix for covariance family.} + +\item{params.centroids}{List of centroids.} + +\item{params.mix}{List of cluster with mixed Gaussian types.} + +\item{params.function}{User energy function} + +\item{control.nstart}{How many times to perform algorithm.} + +\item{control.eps}{What change of value should terminate algorithm.} + +\item{control.itmax}{Maximum number of iterations at each start.} + +\item{log.energy}{Records collected energy of all clusters in each iteration.} + +\item{log.ncluster}{Records number of clusters in each iteration.} + +\item{seed}{User seed} +} +\description{ +Create CEC model object +} +\examples{ +\dontrun{ +CEC(k=3, x=dataset) + +CEC(k=3, x=dataset, control.nstart=10, method.type='spherical', control.eps=0.05) + +CEC(k=2, x=dataset, method.type='spherical', method.init='centroids', + params.centroids=list(c(-0.5,0.5),c(0,0))) + +CEC(k=5, x=dataset, method.type='fixed_spherical', params.r=0.01, + control.nstart=10, control.eps=0.07) + +CEC(k=5, x=dataset, method.type='fixed_covariance', + params.cov=matrix(c(0.03,0,0,0.01),2), control.nstart=10, control.eps=0.06) + +CEC(k=1, x=dataset, method.type='func', + params.function='name_of_my_own_function') + +fixed_spherical_cluster_param = list(method.type = 'fixed_spherical', params.r = 0.001), +covariance_cluster_param = list(method.type = 'fixed_covariance', + params.cov=matrix(c(0.05, 0, 0, 0.001), 2)) +CEC(x = dataset, params.mix = list(covariance_cluster_param, + fixed_spherical_cluster_param, fixed_spherical_cluster_param, + fixed_spherical_cluster_param, fixed_spherical_cluster_param), control.nstart = 10) + +p1 = list(method.type='spherical', k=3) +p2 = list(method.type='diagonal', k=2) +CEC(x=dataset, params.mix=list(p1, p2)) +} +} + diff --git a/man/MultiClassSVM-class.Rd b/man/MultiClassSVM-class.Rd new file mode 100644 index 00000000..c46f05d2 --- /dev/null +++ b/man/MultiClassSVM-class.Rd @@ -0,0 +1,10 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/svm.R +\docType{class} +\name{MultiClassSVM-class} +\alias{MultiClassSVM-class} +\title{Class MultiClassSVM} +\description{ +Class \code{MultiClassSVM} defines a multiclass SVM model class. +} + diff --git a/man/Rcpp_CecConfiguration-class.Rd b/man/Rcpp_CecConfiguration-class.Rd new file mode 100644 index 00000000..2cc86f64 --- /dev/null +++ b/man/Rcpp_CecConfiguration-class.Rd @@ -0,0 +1,10 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/cec.R +\docType{class} +\name{Rcpp_CecConfiguration-class} +\alias{Rcpp_CecConfiguration-class} +\title{Class Rcpp_CecConfiguration.} +\description{ +Class \code{Rcpp_CecConfiguration} defines a CEC model configuration class. +} + diff --git a/man/Rcpp_CecModel-class.Rd b/man/Rcpp_CecModel-class.Rd new file mode 100644 index 00000000..237f6e3c --- /dev/null +++ b/man/Rcpp_CecModel-class.Rd @@ -0,0 +1,10 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/cec.R +\docType{class} +\name{Rcpp_CecModel-class} +\alias{Rcpp_CecModel-class} +\title{Class Rcpp_CecModel.} +\description{ +Class \code{Rcpp_CecModel} defines a CEC model class. 
+} + diff --git a/man/Rcpp_GNGServer-class.Rd b/man/Rcpp_GNGServer-class.Rd new file mode 100644 index 00000000..1b70c8d4 --- /dev/null +++ b/man/Rcpp_GNGServer-class.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/gng.R +\docType{class} +\name{Rcpp_GNGServer-class} +\alias{Rcpp_GNGServer-class} +\title{Class Rcpp_GNGServer.} +\description{ +Class \code{Rcpp_GNGServer} defines a GNGServer class. +} +\section{Methods}{ + +\describe{ +\item{\code{getClustering()}}{Rcpp::NumericVector getClustering()} + +\item{\code{getCurrentIteration()}}{unsigned int getCurrentIteration() const } + +\item{\code{getDatasetSize()}}{unsigned int getDatasetSize() const } + +\item{\code{getErrorStatistics()}}{Rcpp::NumericVector getErrorStatistics()} + +\item{\code{getMeanError()}}{double getMeanError()} + +\item{\code{getNode(...)}}{Rcpp::List getNode(int)} + +\item{\code{getNumberNodes()}}{unsigned int getNumberNodes() const } +}} + diff --git a/man/Rcpp_SVMClient-class.Rd b/man/Rcpp_SVMClient-class.Rd new file mode 100644 index 00000000..7b582ea5 --- /dev/null +++ b/man/Rcpp_SVMClient-class.Rd @@ -0,0 +1,10 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/svm.R +\docType{class} +\name{Rcpp_SVMClient-class} +\alias{Rcpp_SVMClient-class} +\title{Class Rcpp_SVMClient.} +\description{ +Class \code{Rcpp_SVMClient} defines a SVM model class. +} + diff --git a/man/Tset.Rd b/man/Tset.Rd new file mode 100644 index 00000000..aa93c57e --- /dev/null +++ b/man/Tset.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/misc.R +\docType{data} +\name{Tset} +\alias{Tset} +\title{Tset} +\description{ +Simple dataset in the form of T letter +} +\keyword{data} + diff --git a/man/calculateCentroids.Rd b/man/calculateCentroids.Rd new file mode 100644 index 00000000..50640cf3 --- /dev/null +++ b/man/calculateCentroids.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/gng.R +\name{calculateCentroids} +\alias{calculateCentroids} +\title{calculateCentroids} +\usage{ +calculateCentroids(object, + community.detection.algorithm = spinglass.community) +} +\arguments{ +\item{object}{GNG object} + +\item{community.detection.algorithm}{Used algorithm from igraph package, by default spinglass.community} +} +\description{ +Using passed community.detection finds communities and for each community pick node with biggest betweenness score +} +\details{ +Get centroids +} +\examples{ +\dontrun{ +gng <- GNG(gng.preset.sphere(100)) +print(node(gng, calculateCentroids(gng)[1])$pos) +} +} + diff --git a/man/caret.gmumSvmLinear.Rd b/man/caret.gmumSvmLinear.Rd new file mode 100644 index 00000000..cf8986a5 --- /dev/null +++ b/man/caret.gmumSvmLinear.Rd @@ -0,0 +1,28 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/svm.R +\docType{data} +\name{caret.gmumSvmLinear} +\alias{caret.gmumSvmLinear} +\title{Caret model representation for SVM with linear kernel} +\format{List of caret specific values} +\usage{ +caret.gmumSvmLinear +} +\description{ +Supply as parameter "method" in the caret::train function +} +\examples{ +\dontrun{ +model <- train(Class ~ ., data = training, +method = caret.gmumSvmLinear, +preProc = c("center", "scale"), +tuneLength = 8, +trControl = fitControl, +tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +core = "libsvm", # gmum.R parameter - pick library +verbosity = 0 # no 
outputs +) +} +} +\keyword{datasets} + diff --git a/man/caret.gmumSvmPoly.Rd b/man/caret.gmumSvmPoly.Rd new file mode 100644 index 00000000..d507bb63 --- /dev/null +++ b/man/caret.gmumSvmPoly.Rd @@ -0,0 +1,28 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/svm.R +\docType{data} +\name{caret.gmumSvmPoly} +\alias{caret.gmumSvmPoly} +\title{Caret model representation for SVM with linear kernel} +\format{List of caret specific values} +\usage{ +caret.gmumSvmPoly +} +\description{ +Supply as parameter "method" in the caret::poly function +} +\examples{ +\dontrun{ +model <- train(Class ~ ., data = training, +method = caret.gmumSvmPoly, +preProc = c("center", "scale"), +tuneLength = 8, +trControl = fitControl, +tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +core = "libsvm", # gmum.R parameter - pick library +verbosity = 0 # no outputs +) +} +} +\keyword{datasets} + diff --git a/man/caret.gmumSvmRadial.Rd b/man/caret.gmumSvmRadial.Rd new file mode 100644 index 00000000..1d1b1395 --- /dev/null +++ b/man/caret.gmumSvmRadial.Rd @@ -0,0 +1,28 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/svm.R +\docType{data} +\name{caret.gmumSvmRadial} +\alias{caret.gmumSvmRadial} +\title{Caret model representation for SVM with radial kernel} +\format{List of caret specific values} +\usage{ +caret.gmumSvmRadial +} +\description{ +Supply as parameter "method" in the caret::train function +} +\examples{ +\dontrun{ +model <- train(Class ~ ., data = training, +method = caret.gmumSvmRadial, +preProc = c("center", "scale"), +tuneLength = 8, +trControl = fitControl, +tuneGrid = expand.grid(C=10^(c(-4:4)), gamma=10^(c(-4:4))), +core = "libsvm", # gmum.R parameter - pick library +verbosity = 0 # no outputs +) +} +} +\keyword{datasets} + diff --git a/man/cec.ellipsegauss.Rd b/man/cec.ellipsegauss.Rd new file mode 100644 index 00000000..a6ec7f3e --- /dev/null +++ b/man/cec.ellipsegauss.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/misc.R +\docType{data} +\name{cec.ellipsegauss} +\alias{cec.ellipsegauss} +\title{cec.ellipsegauss} +\description{ +Simple dataset consisting in data drawn from set of elliptical gausses +} +\keyword{data} + diff --git a/man/cec.ellipsegauss.extra.Rd b/man/cec.ellipsegauss.extra.Rd new file mode 100644 index 00000000..ed9980f0 --- /dev/null +++ b/man/cec.ellipsegauss.extra.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/misc.R +\docType{data} +\name{cec.ellipsegauss.extra} +\alias{cec.ellipsegauss.extra} +\title{cec.ellipsegauss.extra} +\description{ +Extra information for dataset cec.ellipsegauss (energy and cluster assignment) +} +\keyword{data} + diff --git a/man/cec.mouse1.spherical.Rd b/man/cec.mouse1.spherical.Rd new file mode 100644 index 00000000..d529febc --- /dev/null +++ b/man/cec.mouse1.spherical.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/misc.R +\docType{data} +\name{cec.mouse1.spherical} +\alias{cec.mouse1.spherical} +\title{cec.mouse1.spherical} +\description{ +Simple mouse-shaped dataset +} +\keyword{data} + diff --git a/man/cec.mouse1.spherical.extra.Rd b/man/cec.mouse1.spherical.extra.Rd new file mode 100644 index 00000000..5c1bcca9 --- /dev/null +++ b/man/cec.mouse1.spherical.extra.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2 (4.1.1): do not edit by hand +% Please edit documentation in R/misc.R 
diff --git a/man/cec.mouse1.spherical.extra.Rd b/man/cec.mouse1.spherical.extra.Rd
new file mode 100644
index 00000000..5c1bcca9
--- /dev/null
+++ b/man/cec.mouse1.spherical.extra.Rd
@@ -0,0 +1,11 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/misc.R
+\docType{data}
+\name{cec.mouse1.spherical.extra}
+\alias{cec.mouse1.spherical.extra}
+\title{cec.mouse1.spherical.extra}
+\description{
+Extra information for the dataset cec.mouse1.spherical (energy and cluster assignment)
+}
+\keyword{data}
+
diff --git a/man/centers.Rd b/man/centers.Rd
new file mode 100644
index 00000000..62b7daa4
--- /dev/null
+++ b/man/centers.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{centers}
+\alias{centers}
+\alias{centers,Rcpp_CecModel-method}
+\title{centers}
+\usage{
+centers(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the centers of the clusters
+}
+\examples{
+\dontrun{
+centers(c)
+}
+}
+
diff --git a/man/clustering-methods.Rd b/man/clustering-methods.Rd
new file mode 100644
index 00000000..42716137
--- /dev/null
+++ b/man/clustering-methods.Rd
@@ -0,0 +1,36 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R, R/gng.R
+\docType{methods}
+\name{clustering}
+\alias{clustering}
+\alias{clustering.Rcpp_CecModel}
+\alias{clustering.Rcpp_GNGServer}
+\title{clustering}
+\usage{
+clustering(c)
+
+\method{clustering}{Rcpp_CecModel}(c)
+
+\method{clustering}{Rcpp_GNGServer}(c)
+}
+\arguments{
+\item{c}{Object with clusters}
+}
+\description{
+Print the labels assigned
+
+Print the labels assigned
+
+Gets a vector with node indexes assigned to examples in the dataset
+}
+\examples{
+\dontrun{
+clustering(c)
+}
+\dontrun{
+clustering(c)
+}
+gng <- GNG(gng.preset.sphere(100))
+clustering(gng)
+}
+
diff --git a/man/convertToIGraph.Rd b/man/convertToIGraph.Rd
new file mode 100644
index 00000000..6b78ef7e
--- /dev/null
+++ b/man/convertToIGraph.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{convertToIGraph}
+\alias{convertToIGraph}
+\title{convertToIGraph}
+\usage{
+convertToIGraph(object, calculate.dist = TRUE)
+}
+\arguments{
+\item{object}{GNG object}
+
+\item{calculate.dist}{If TRUE, will calculate all \code{n^2} distances in the graph}
+}
+\description{
+Converts GNG to an igraph object, where every vertex contains the attributes gng.index, error, data.label and the first 3 spatial coordinates (as attributes v0, v1, v2).
+Additionally, a utility attribute is present if utility GNG is used.
+}
+
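A brief sketch of the convertToIGraph workflow described above; the vertex attribute names follow the documentation (error, v0..v2):

```{R}
library(gmum.r)
library(igraph)

gng <- GNG(gng.preset.sphere(100))
g <- convertToIGraph(gng)

# Standard igraph tooling now applies to the exported graph
summary(degree(g))
head(V(g)$error)
```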
diff --git a/man/covMatrix.Rd b/man/covMatrix.Rd
new file mode 100644
index 00000000..1611dca6
--- /dev/null
+++ b/man/covMatrix.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{covMatrix}
+\alias{covMatrix}
+\alias{covMatrix,Rcpp_CecModel-method}
+\title{covMatrix}
+\usage{
+covMatrix(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the covariance matrices of the clusters
+}
+\examples{
+\dontrun{
+covMatrix(c)
+}
+}
+
diff --git a/man/energy.Rd b/man/energy.Rd
new file mode 100644
index 00000000..86b44f1f
--- /dev/null
+++ b/man/energy.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{energy}
+\alias{energy}
+\alias{energy,Rcpp_CecModel-method}
+\title{energy}
+\usage{
+energy(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the resulting energy of the clustering found
+}
+\examples{
+\dontrun{
+energy(c)
+}
+}
+
diff --git a/man/errorStatistics.Rd b/man/errorStatistics.Rd
new file mode 100644
index 00000000..fed34f6f
--- /dev/null
+++ b/man/errorStatistics.Rd
@@ -0,0 +1,19 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{errorStatistics}
+\alias{errorStatistics}
+\title{errorStatistics}
+\usage{
+errorStatistics(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Gets a vector with errors for every second of execution
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+errorStatistics(gng)
+}
+
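The CEC accessors above combine naturally when inspecting a trained model; a minimal sketch, assuming a model trained as in the other examples in this changeset:

```{R}
library(gmum.r)

data(cec.mouse1.spherical)
c <- CEC(x = cec.mouse1.spherical, k = 3)

energy(c)     # final energy of the clustering found
centers(c)    # cluster centers
covMatrix(c)  # per-cluster covariance matrices
```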
diff --git a/man/findClosests.Rd b/man/findClosests.Rd
new file mode 100644
index 00000000..e5196650
--- /dev/null
+++ b/man/findClosests.Rd
@@ -0,0 +1,30 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{findClosests}
+\alias{findClosests}
+\title{findClosests}
+\usage{
+findClosests(object, node.ids, x)
+}
+\arguments{
+\item{object}{GNG object}
+
+\item{node.ids}{List of indexes of nodes in the GNG graph.}
+
+\item{x}{Can be either \code{vector} or \code{data.frame}.}
+}
+\description{
+Finds the closest node from the given list to a vector. Often used together with calculateCentroids
+}
+\details{
+Find closest node
+}
+\examples{
+\dontrun{
+gng <- GNG(gng.preset.sphere(100))
+# Find closest centroid to c(1,1,1)
+found.centroids <- calculateCentroids(gng)
+findClosests(gng, found.centroids, c(1,1,1))
+}
+}
+
diff --git a/man/get.wine.dataset.X.Rd b/man/get.wine.dataset.X.Rd
new file mode 100644
index 00000000..d7e2d9f7
--- /dev/null
+++ b/man/get.wine.dataset.X.Rd
@@ -0,0 +1,15 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.utils.R
+\name{get.wine.dataset.X}
+\alias{get.wine.dataset.X}
+\title{get.wine.dataset.X}
+\usage{
+get.wine.dataset.X(scale = TRUE)
+}
+\arguments{
+\item{scale}{If TRUE, performs feature scaling}
+}
+\description{
+Retrieves the wine dataset design matrix from the UCI repository
+}
+
diff --git a/man/get.wine.dataset.y.Rd b/man/get.wine.dataset.y.Rd
new file mode 100644
index 00000000..08894374
--- /dev/null
+++ b/man/get.wine.dataset.y.Rd
@@ -0,0 +1,12 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.utils.R
+\name{get.wine.dataset.y}
+\alias{get.wine.dataset.y}
+\title{get.wine.dataset.y}
+\usage{
+get.wine.dataset.y()
+}
+\description{
+Retrieves the wine dataset labels from the UCI repository
+}
+
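A sketch combining the two wine-dataset helpers above with GNG; since both helpers fetch data from the UCI repository, a network connection is assumed:

```{R}
library(gmum.r)

X <- get.wine.dataset.X()   # scaled design matrix
y <- get.wine.dataset.y()   # class labels
gng <- GNG(X, labels = y, max.nodes = 20)
```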
diff --git a/man/getDataset.Rd b/man/getDataset.Rd
new file mode 100644
index 00000000..467503b5
--- /dev/null
+++ b/man/getDataset.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{getDataset}
+\alias{getDataset}
+\alias{getDataset,Rcpp_CecModel-method}
+\title{getDataset}
+\usage{
+getDataset(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the input dataset
+}
+\examples{
+\dontrun{
+getDataset(c)
+}
+}
+
diff --git a/man/gmum-utils-package.Rd b/man/gmum-utils-package.Rd
deleted file mode 100755
index ae1d293c..00000000
--- a/man/gmum-utils-package.Rd
+++ /dev/null
@@ -1,40 +0,0 @@
-\name{gmum-utils-package}
-\alias{gmum-utils-package}
-\alias{gmum-utils}
-\docType{package}
-\title{
-What the package does (short line)
-}
-\description{
-More about what it does (maybe more than one line)
-~~ A concise (1-5 lines) description of the package ~~
-}
-\details{
-\tabular{ll}{
-Package: \tab gmum-utils\cr
-Type: \tab Package\cr
-Version: \tab 1.0\cr
-Date: \tab 2014-02-25\cr
-License: \tab What license is it under?\cr
-}
-~~ An overview of how to use the package, including the most important ~~
-~~ functions ~~
-}
-\author{
-Who wrote it
-
-Maintainer: Who to complain to
-}
-\references{
-~~ Literature or other references for background information ~~
-}
-~~ Optionally other standard keywords, one per line, from file KEYWORDS in ~~
-~~ the R documentation directory ~~
-\keyword{ package }
-\seealso{
-~~ Optional links to other man pages, e.g. ~~
-~~ \code{\link[:-package]{}} ~~
-}
-\examples{
-%% ~~ simple examples of the most important functions ~~
-}
diff --git a/man/gng.Rd b/man/gng.Rd
new file mode 100644
index 00000000..a823695f
--- /dev/null
+++ b/man/gng.Rd
@@ -0,0 +1,73 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{GNG}
+\alias{GNG}
+\title{Constructor of GrowingNeuralGas object.}
+\usage{
+GNG(x = NULL, labels = c(), beta = 0.99, alpha = 0.5,
+  max.nodes = 1000, eps.n = 6e-04, eps.w = 0.05, max.edge.age = 200,
+  train.online = FALSE, max.iter = 200, dim = -1,
+  min.improvement = 0.001, lambda = 200, verbosity = 0, seed = -1,
+  k = NULL)
+}
+\arguments{
+\item{x}{Passed data (matrix or data.frame) for offline training}
+
+\item{labels}{Every example can be associated with labels that are added to nodes later. By default empty}
+
+\item{beta}{Decrease the error variables of all nodes
+by this fraction (forgetting rate). Default 0.99}
+
+\item{alpha}{Decrease the error variables of the nodes neighboring
+the newly inserted node by this fraction. Default 0.5}
+
+\item{max.nodes}{Maximum number of nodes
+(after reaching this size it will continue running, but won't add new nodes)}
+
+\item{eps.n}{How strongly to adapt the neighbour node. Default \code{0.0006}}
+
+\item{eps.w}{How strongly to adapt the winning node. Default \code{0.05}}
+
+\item{max.edge.age}{Maximum edge age. Decrease to increase the speed of change of the graph topology. Default \code{200}}
+
+\item{train.online}{Default FALSE. If set, will run in online fashion}
+
+\item{max.iter}{If training offline, will stop after exceeding max.iter iterations. Default \code{200}}
+
+\item{dim}{Used for online training, specifies the training example size}
+
+\item{min.improvement}{Used for offline (default) training.
+Controls the stopping criterion; decrease it if training stops too early. Default \code{1e-3}}
+
+\item{lambda}{A new vertex is added every lambda iterations. Default 200}
+
+\item{verbosity}{How verbose the process should be, as an integer from \eqn{[0,6]}, default: \code{0}}
+
+\item{seed}{Seed for internal randomization}
+
+\item{k}{Utility constant, by default turned off. A good value is 1.3. Constant controlling the speed of erasing obsolete nodes,
+see \url{http://sund.de/netze/applets/gng/full/tex/DemoGNG/node20.html}}
+}
+\description{
+Construct a GNG object. Can be used to train offline or online.
+}
+\examples{
+\dontrun{
+X <- gng.preset.sphere(100)
+y <- round(runif(100))
+# Train in an offline manner
+gng <- GNG(X, labels=y, max.nodes=20)
+# Plot
+plot(gng)
+
+# Train in an online manner with utility (erasing obsolete nodes)
+gng <- GNG(max.nodes=20, train.online=TRUE, k=1.3, dim=3)
+insertExamples(gng, X, labels=y)
+run(gng)
+Sys.sleep(10)
+terminate(gng)
+# Plot
+plot(gng)
+}
+}
+
diff --git a/man/gng.plot.layout.v2d.Rd b/man/gng.plot.layout.v2d.Rd
new file mode 100644
index 00000000..cbf04d09
--- /dev/null
+++ b/man/gng.plot.layout.v2d.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{gng.plot.layout.v2d}
+\alias{gng.plot.layout.v2d}
+\title{Use first two spatial coordinates as position in layout}
+\usage{
+gng.plot.layout.v2d(g)
+}
+\arguments{
+\item{g}{GNG object}
+}
+\description{
+Use first two spatial coordinates as position in layout
+}
+\note{
+You can pass any igraph layout algorithm to plot
+}
+
diff --git a/man/gng.preset.cube.Rd b/man/gng.preset.cube.Rd
new file mode 100644
index 00000000..828ac9af
--- /dev/null
+++ b/man/gng.preset.cube.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.presets.R
+\name{gng.preset.cube}
+\alias{gng.preset.cube}
+\title{gng.preset.cube}
+\usage{
+gng.preset.cube(N, r = 0.5, center = c(0.5, 0.5, 0.5))
+}
+\arguments{
+\item{N}{Number of points}
+
+\item{r}{Length of the side of the cube}
+
+\item{center}{Center of the cube}
+}
+\description{
+Generate sample cube dataset
+}
+\details{
+Cube preset dataset
+}
+\examples{
+X <- gng.preset.cube(100)
+gng <- GNG(X)
+}
+
diff --git a/man/gng.preset.plane.Rd b/man/gng.preset.plane.Rd
new file mode 100644
index 00000000..aa5d86ea
--- /dev/null
+++ b/man/gng.preset.plane.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.presets.R
+\name{gng.preset.plane}
+\alias{gng.preset.plane}
+\title{gng.preset.plane}
+\usage{
+gng.preset.plane(N, side = 0.5, center = c(0.5, 0.5, 0.5))
+}
+\arguments{
+\item{N}{Number of points}
+
+\item{side}{Length of the side of the plane}
+
+\item{center}{Center of the plane}
+}
+\description{
+Generate sample plane dataset
+}
+\details{
+Plane preset dataset
+}
+\examples{
+X <- gng.preset.plane(100)
+gng <- GNG(X)
+}
+
diff --git a/man/gng.preset.sphere.Rd b/man/gng.preset.sphere.Rd
new file mode 100644
index 00000000..5cf113c8
--- /dev/null
+++ b/man/gng.preset.sphere.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.presets.R
+\name{gng.preset.sphere}
+\alias{gng.preset.sphere}
+\title{gng.preset.sphere}
+\usage{
+gng.preset.sphere(N, r = 0.5, center = c(0.5, 0.5, 0.5))
+}
+\arguments{
+\item{N}{Number of points}
+
+\item{r}{Radius of the sphere}
+
+\item{center}{Center of the sphere}
+}
+\description{
+Generate sample sphere dataset
+}
+\details{
+Sphere preset dataset
+}
+\examples{
+X <- gng.preset.sphere(100)
+gng <- GNG(X)
+}
+
diff --git a/man/gngLoad.Rd b/man/gngLoad.Rd
new file mode 100644
index 00000000..a83f48ea
--- /dev/null
+++ b/man/gngLoad.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{gngLoad}
+\alias{gngLoad}
+\title{gngLoad}
+\usage{
+gngLoad(filename)
+}
+\arguments{
+\item{filename}{Binary file location}
+}
+\description{
+Reads a model from a disk-space-efficient binary format.
+}
+\details{
+Load model from binary format
+}
+
diff --git a/man/gngSave.Rd b/man/gngSave.Rd
new file mode 100644
index 00000000..e2b1fa93
--- /dev/null
+++ b/man/gngSave.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{gngSave}
+\alias{gngSave}
+\title{gngSave}
+\usage{
+gngSave(object, filename)
+}
+\arguments{
+\item{object}{GNG object}
+
+\item{filename}{File where the binary will be saved}
+}
+\description{
+Writes the model to a disk-space-efficient binary format.
+}
+\details{
+Save model to binary format
+}
+
diff --git a/man/hello_gmum.Rd b/man/hello_gmum.Rd
deleted file mode 100644
index 00bf46bd..00000000
--- a/man/hello_gmum.Rd
+++ /dev/null
@@ -1,14 +0,0 @@
-\name{hello_gmum}
-\alias{hello_gmum}
-\title{Description
-Hello gmum call!
-Details
-Prints out "Hello Gmum"}
-\usage{
-hello_gmum()
-}
-\description{
-Description Hello gmum call! Details Prints out "Hello
-Gmum"
-}
-
diff --git a/man/insertExamples.Rd b/man/insertExamples.Rd
new file mode 100644
index 00000000..98612ecd
--- /dev/null
+++ b/man/insertExamples.Rd
@@ -0,0 +1,31 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{insertExamples}
+\alias{insertExamples}
+\title{insertExamples}
+\usage{
+insertExamples(object, examples, labels = c())
+}
+\arguments{
+\item{object}{GNG object}
+
+\item{examples}{\code{matrix} or \code{data.frame} with rows as examples. Note: if training online, make sure
+the number of columns matches the dim parameter passed to the GNG constructor.}
+
+\item{labels}{\code{vector} of labels that will be associated with nodes in the graph. GNG will assign to each
+node the mean of the labels of the closest examples.}
+}
+\description{
+Insert examples with optional labels.
+}
+\note{
+It copies your examples twice in RAM. You might want to use object$.insertExamples instead.
+}
+\examples{
+X <- gng.preset.sphere(100)
+gng <- GNG(X, train.online=TRUE)
+# Add more examples
+X = gng.preset.sphere(100)
+insertExamples(gng, X)
+}
+
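A save/load roundtrip sketch for the gngSave/gngLoad pair documented above; the file name is illustrative:

```{R}
library(gmum.r)

gng <- GNG(gng.preset.sphere(100), max.nodes = 20)

gngSave(gng, "gng_model.bin")
gng2 <- gngLoad("gng_model.bin")
numberNodes(gng2)
```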
diff --git a/man/isRunning.Rd b/man/isRunning.Rd
new file mode 100644
index 00000000..1d1aadf6
--- /dev/null
+++ b/man/isRunning.Rd
@@ -0,0 +1,23 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{isRunning}
+\alias{isRunning}
+\title{isRunning}
+\usage{
+isRunning(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Returns TRUE if the GNG object is training
+}
+\details{
+Check if GNG is running
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+# FALSE, because train.online was not passed to the constructor
+print(isRunning(gng))
+}
+
diff --git a/man/iterations.Rd b/man/iterations.Rd
new file mode 100644
index 00000000..28c4ef25
--- /dev/null
+++ b/man/iterations.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{iterations}
+\alias{iterations}
+\alias{iterations,Rcpp_CecModel-method}
+\title{iterations}
+\usage{
+iterations(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print how many iterations it took to learn the CEC model
+}
+\examples{
+\dontrun{
+iterations(c)
+}
+}
+
diff --git a/man/logClusters.Rd b/man/logClusters.Rd
new file mode 100644
index 00000000..41a0f315
--- /dev/null
+++ b/man/logClusters.Rd
@@ -0,0 +1,26 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\docType{data}
+\name{logClusters}
+\alias{logClusters}
+\alias{logClusters,Rcpp_CecModel-method}
+\title{logClusters}
+\format{\preformatted{ NULL
+}}
+\usage{
+logClusters(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the number of clusters recorded at each stage of learning.
+Data is recorded only if you chose to when creating the CEC model object.
+}
+\examples{
+\dontrun{
+logClusters(c)
+}
+}
+\keyword{datasets}
+
diff --git a/man/logEnergy.Rd b/man/logEnergy.Rd
new file mode 100644
index 00000000..236122fd
--- /dev/null
+++ b/man/logEnergy.Rd
@@ -0,0 +1,22 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{logEnergy}
+\alias{logEnergy}
+\alias{logEnergy,Rcpp_CecModel-method}
+\title{logEnergy}
+\usage{
+logEnergy(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Print the energy recorded at each stage of learning.
+Data is recorded only if you chose to when creating the CEC model object.
+}
+\examples{
+\dontrun{
+logEnergy(c)
+}
+}
+
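A sketch of the logging accessors above; the constructor flag names (log.energy, log.ncluster) follow the error messages in the package's C++ sources later in this changeset and should be treated as assumptions:

```{R}
library(gmum.r)

data(cec.mouse1.spherical)
c <- CEC(x = cec.mouse1.spherical, k = 3,
         log.energy = TRUE, log.ncluster = TRUE)

logEnergy(c)    # energy recorded at each stage of learning
logClusters(c)  # cluster counts recorded at each stage
```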
diff --git a/man/meanError.Rd b/man/meanError.Rd
new file mode 100644
index 00000000..8176408e
--- /dev/null
+++ b/man/meanError.Rd
@@ -0,0 +1,19 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{meanError}
+\alias{meanError}
+\title{meanError}
+\usage{
+meanError(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Gets the mean error of the graph (note: blocks execution, O(n))
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+meanError(gng)
+}
+
diff --git a/man/node-methods.Rd b/man/node-methods.Rd
new file mode 100644
index 00000000..93b4cb90
--- /dev/null
+++ b/man/node-methods.Rd
@@ -0,0 +1,24 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{node}
+\alias{node}
+\title{node}
+\usage{
+node(x, gng_id)
+}
+\arguments{
+\item{x}{GNG object}
+
+\item{gng_id}{Id of the node to retrieve. This is the id returned by functions like predict or calculateCentroids}
+}
+\description{
+Retrieves a node from the resulting graph
+}
+\details{
+Get GNG node
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+print(node(gng, 10)$pos)
+}
+
diff --git a/man/numberNodes.Rd b/man/numberNodes.Rd
new file mode 100644
index 00000000..de442ee4
--- /dev/null
+++ b/man/numberNodes.Rd
@@ -0,0 +1,15 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{numberNodes}
+\alias{numberNodes}
+\title{numberNodes}
+\usage{
+numberNodes(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Get the current number of nodes in the graph
+}
+
diff --git a/man/optimized-gng.Rd b/man/optimized-gng.Rd
new file mode 100644
index 00000000..42a35c27
--- /dev/null
+++ b/man/optimized-gng.Rd
@@ -0,0 +1,65 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{OptimizedGNG}
+\alias{OptimizedGNG}
+\title{Constructor of Optimized GrowingNeuralGas object.}
+\usage{
+OptimizedGNG(x = NULL, labels = c(), beta = 0.99, alpha = 0.5,
+  max.nodes = 1000, eps.n = 6e-04, eps.w = 0.05, max.edge.age = 200,
+  train.online = FALSE, max.iter = 200, dim = 0,
+  min.improvement = 0.001, lambda = 200, verbosity = 0, seed = -1,
+  value.range = c(0, 1))
+}
+\arguments{
+\item{x}{Passed data (matrix or data.frame) for offline training}
+
+\item{labels}{Every example can be associated with labels that are added to nodes later. By default empty}
+
+\item{beta}{Decrease the error variables of all nodes
+by this fraction (forgetting rate). Default 0.99}
+
+\item{alpha}{Decrease the error variables of the nodes neighboring
+the newly inserted node by this fraction. Default 0.5}
+
+\item{max.nodes}{Maximum number of nodes
+(after reaching this size it will continue running, but new nodes won't be added)}
+
+\item{eps.n}{Strength of adaptation of the neighbour node. Default \code{0.0006}}
+
+\item{eps.w}{Strength of adaptation of the winning node. Default \code{0.05}}
+
+\item{max.edge.age}{Maximum edge age. Decrease to increase the speed of change of the graph topology. Default \code{200}}
+
+\item{train.online}{If set, will run in online fashion. Default \code{FALSE}}
+
+\item{max.iter}{If training offline, will stop after exceeding max.iter iterations. Default \code{200}}
+
+\item{dim}{Used for online training, specifies dataset example dimensionality}
+
+\item{min.improvement}{Used for offline (default) training.
+Controls the stopping criterion; decrease it if training stops too early. Default \code{1e-3}}
+
+\item{lambda}{A new vertex is added every lambda iterations. Default 200}
+
+\item{verbosity}{How verbose the process should be, as an integer from \eqn{[0,6]}, default: \code{0}}
+
+\item{seed}{Seed for internal randomization}
+
+\item{value.range}{All example features should be in this range; required for the optimized version of the algorithm. Default \code{(0,1)}}
+}
+\description{
+Construct a simplified and optimized GNG object. Can be used to train offline or online. Data dimensionality shouldn't be too
+large; if it is, consider using dimensionality reduction techniques.
+}
+\examples{
+\dontrun{
+# Train an online OptimizedGNG, passing the data range explicitly
+X <- gng.preset.sphere(100)
+gng <- OptimizedGNG(train.online = TRUE, value.range=c(min(X), max(X)), dim=3, max.nodes=20)
+insertExamples(gng, X)
+run(gng)
+Sys.sleep(10)
+pause(gng)
+}
+}
+
diff --git a/man/pause.Rd b/man/pause.Rd
new file mode 100644
index 00000000..362b6185
--- /dev/null
+++ b/man/pause.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{pause}
+\alias{pause}
+\title{pause}
+\usage{
+pause(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Pause the algorithm
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+pause(gng)
+print(isRunning(gng))
+}
+
diff --git a/man/plot.cec.Rd b/man/plot.cec.Rd
new file mode 100644
index 00000000..182edba3
--- /dev/null
+++ b/man/plot.cec.Rd
@@ -0,0 +1,38 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.plot.R
+\name{plot.Rcpp_CecModel}
+\alias{plot.Rcpp_CecModel}
+\title{plot}
+\usage{
+\method{plot}{Rcpp_CecModel}(x, slice = c(), pca = FALSE,
+  ellipses = FALSE, centers = FALSE, ...)
+}
+\arguments{
+\item{x}{CEC model object.}
+
+\item{slice}{List of dimensions chosen for display, since the plot is 2D.}
+
+\item{pca}{Whether to apply PCA}
+
+\item{ellipses}{Outline clusters.}
+
+\item{centers}{Mark the center of every cluster.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Plot the clustering found on a 2D plot, coloring by cluster.
+}
+\details{
+Plot CEC
+}
+\examples{
+\dontrun{
+plot(cec)
+plot(cec, slice=c(1,3), ellipses=TRUE)
+plot(cec, slice=c(1,2,3))
+plot(cec, ellipses=TRUE, centers=FALSE)
+plot(cec, pca=TRUE, ellipses=TRUE, centers=FALSE)
+}
+}
+
diff --git a/man/plot.gng.Rd b/man/plot.gng.Rd
new file mode 100644
index 00000000..16c19907
--- /dev/null
+++ b/man/plot.gng.Rd
@@ -0,0 +1,46 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{plot.Rcpp_GNGServer}
+\alias{plot.Rcpp_GNGServer}
+\title{plot GNG object}
+\usage{
+\method{plot}{Rcpp_GNGServer}(x, vertex.color = gng.plot.color.cluster,
+  layout = layout.fruchterman.reingold, mode = gng.plot.2d,
+  vertex.size = 3, ...)
+}
+\arguments{
+\item{x}{GNG object}
+
+\item{vertex.color}{How to color vertices. Possible values: \code{"fast.cluster"} (vertex color is set to fastgreedy.community clustering),
+\code{"label"} (rounds to integer label if present), \code{list of integers} (colors vertices according to provided list), \code{"none"} (every node is white)}
+
+\item{layout}{igraph layout to be used when plotting. Defaults to \code{layout.fruchterman.reingold}.
+Another good choice is \code{gng.plot.layout.v2d}, which uses the first two spatial coordinates.}
+
+\item{mode}{\code{"2d"} (igraph plot)
+\code{"2d.errors"} (igraph plot with mean error log plot)}
+
+\item{vertex.size}{Size of plotted vertices}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Plot the resulting graph using igraph plotting
+}
+\details{
+Plot GNG
+}
+\note{
+If you want to "power-use" plotting and plot, for instance, a subgraph, you might be interested in
+exporting the igraph with the convertToIGraph function
+}
+\examples{
+\dontrun{
+gng <- GNG(scaled.wine)
+# Plots igraph using the first 2 coordinates and colors according to clusters
+plot(gng, mode=gng.plot.2d.errors, layout=gng.plot.layout.v2d, vertex.color=gng.plot.color.cluster)

+# For more possibilities see the gng.plot.* constants
+}
+}
+
diff --git a/man/plot.svm.Rd b/man/plot.svm.Rd
new file mode 100644
index 00000000..9d62545f
--- /dev/null
+++ b/man/plot.svm.Rd
@@ -0,0 +1,45 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.R
+\name{plot.Rcpp_SVMClient}
+\alias{plot.Rcpp_SVMClient}
+\title{Plot SVM object}
+\usage{
+\method{plot}{Rcpp_SVMClient}(x, X = NULL, mode = "normal", cols = c(1,
+  2), radius = 3, radius.max = 10, ...)
+}
+\arguments{
+\item{x}{Trained SVM object}
+
+\item{X}{Optional new data points to be predicted and plotted in one of the following formats:
+\code{data.frame}, \code{data.matrix}; default: \code{NULL}}
+
+\item{mode}{Which plotting mode to use as string, available are:
+\itemize{
+\item \code{'normal'} - default mode, plots the data in the cols argument and a linear decision
+boundary if available
+\item \code{'pca'} - performs PCA decomposition and draws the data in the subspace of the first 2 dimensions
+from the PCA
+\item \code{'contour'} - contour plot for non-linear kernels
+}}
+
+\item{cols}{Data dimensions to be plotted as vector of length 2, default: \code{c(1,2)}}
+
+\item{radius}{Radius of the plotted data points as float, default: \code{3}}
+
+\item{radius.max}{Maximum radius of data points that can be plotted when the model is trained
+with example weights, as float, default: \code{10}}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Plots the trained SVM's data and the model's discriminant
+}
+\examples{
+\dontrun{
+# here we assume svm is a trained SVM model
+plot(svm)
+plot(svm, X=x, cols=c(1,3))
+plot(svm, mode="pca", radius=5)
+}
+}
+
diff --git a/man/predict.cec.Rd b/man/predict.cec.Rd
new file mode 100644
index 00000000..6eeace4a
--- /dev/null
+++ b/man/predict.cec.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{predict}
+\alias{predict}
+\alias{predict,Rcpp_CecModel-method}
+\alias{predict.Rcpp_CecModel}
+\title{predict}
+\usage{
+\method{predict}{Rcpp_CecModel}(object, x, ...)
+}
+\arguments{
+\item{object}{Trained CEC model object.}
+
+\item{x}{Given point.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Classify a new point according to the model (returns the index of the cluster to which the given point belongs)
+}
+
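The CEC predict method above has no example of its own, so a minimal sketch; the 2D query point assumes a 2D dataset such as cec.mouse1.spherical:

```{R}
library(gmum.r)

data(cec.mouse1.spherical)
c <- CEC(x = cec.mouse1.spherical, k = 3)

# Index of the cluster the new point is assigned to
predict(c, c(0, 0))
```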
diff --git a/man/predict.gng.Rd b/man/predict.gng.Rd
new file mode 100644
index 00000000..c834daed
--- /dev/null
+++ b/man/predict.gng.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{predict.gng}
+\alias{predict.Rcpp_GNGServer}
+\alias{predict.gng}
+\title{predict}
+\usage{
+\method{predict}{Rcpp_GNGServer}(object, x, ...)
+}
+\arguments{
+\item{object}{Trained model}
+
+\item{x}{Vector or matrix of examples}
+
+\item{...}{other arguments not used by this method}
+}
+\description{
+Retrieves a prediction from the trained GNG model
+}
+\details{
+Predict
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+predict(gng, c(1,2,2))
+}
+
diff --git a/man/predict.svm.Rd b/man/predict.svm.Rd
new file mode 100644
index 00000000..7f457a8e
--- /dev/null
+++ b/man/predict.svm.Rd
@@ -0,0 +1,33 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.R
+\name{predict.Rcpp_SVMClient}
+\alias{predict.Rcpp_SVMClient}
+\title{Predict using SVM object}
+\usage{
+\method{predict}{Rcpp_SVMClient}(object, x_test, decision.function = FALSE,
+  ...)
+}
+\arguments{
+\item{object}{Trained SVM object}
+
+\item{x_test}{Unlabeled data, in one of the following formats:
+\code{data.frame}, \code{data.matrix}, \code{SparseM::matrix.csr}, \code{Matrix::Matrix},
+\code{slam::simple_triplet_matrix}}
+
+\item{decision.function}{If \code{TRUE}, returns the SVM's decision function
+(distance of a point from the discriminant) instead of predicted labels, default: \code{FALSE}}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Returns predicted classes or the distance to the discriminant for the provided test examples.
+}
+\examples{
+\dontrun{
+# first, the SVM model needs to be trained
+svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+# then we can use it to predict unknown samples
+predict(svm, x_test)
+}
+}
+
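A sketch of the decision.function switch documented above, reusing the breast cancer dataset layout from the SVM examples later in this changeset:

```{R}
library(gmum.r)

data(svm_breast_cancer_dataset)
ds <- svm.breastcancer.dataset
svm <- SVM(x = ds[,-1], y = ds[,1], core = "libsvm", kernel = "linear", C = 1)

pred <- predict(svm, ds[,-1])                            # predicted labels
dec  <- predict(svm, ds[,-1], decision.function = TRUE)  # distances from the discriminant
```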
diff --git a/man/predictComponent-methods.Rd b/man/predictComponent-methods.Rd
new file mode 100644
index 00000000..97d592ad
--- /dev/null
+++ b/man/predictComponent-methods.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\docType{methods}
+\name{predictComponent}
+\alias{predictComponent}
+\title{predictComponent}
+\usage{
+predictComponent(object, x)
+}
+\arguments{
+\item{object}{GNG object}
+
+\item{x}{Can be either \code{vector} or \code{data.frame}.}
+}
+\description{
+Finds the connected component closest to the given vector(s). On the first
+execution of the function, strongly connected components are calculated using the igraph::clusters function.
+}
+\details{
+Find closest component
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+# Find closest component to c(1,1,1)
+predictComponent(gng, c(1,1,1))
+}
+
diff --git a/man/print.cec.Rd b/man/print.cec.Rd
new file mode 100644
index 00000000..1f9add81
--- /dev/null
+++ b/man/print.cec.Rd
@@ -0,0 +1,22 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.print.R
+\docType{methods}
+\name{print.Rcpp_CecModel}
+\alias{print.Rcpp_CecModel}
+\title{print}
+\usage{
+\method{print}{Rcpp_CecModel}(x, ...)
+}
+\arguments{
+\item{x}{CEC object model.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Print basic information about the clusters found.
+Presents the structure of the CEC results object (clusters found)
+}
+\details{
+Print CEC
+}
+
diff --git a/man/print.gng.Rd b/man/print.gng.Rd
new file mode 100644
index 00000000..d372ad0a
--- /dev/null
+++ b/man/print.gng.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.utils.R
+\docType{methods}
+\name{print.Rcpp_GNGServer}
+\alias{print.Rcpp_GNGServer}
+\title{print}
+\usage{
+\method{print}{Rcpp_GNGServer}(x, ...)
+}
+\arguments{
+\item{x}{GNG object model.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Print basic information about the GNG object
+}
+
diff --git a/man/print.svm.Rd b/man/print.svm.Rd
new file mode 100644
index 00000000..b6332a2b
--- /dev/null
+++ b/man/print.svm.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.R
+\name{print.Rcpp_SVMClient}
+\alias{print.Rcpp_SVMClient}
+\title{Print SVM object}
+\usage{
+\method{print}{Rcpp_SVMClient}(x, ...)
+}
+\arguments{
+\item{x}{Trained SVM object}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Prints a short summary of the trained model.
+}
+
diff --git a/man/rcpp_hello_world.Rd b/man/rcpp_hello_world.Rd
deleted file mode 100755
index e4f90bf8..00000000
--- a/man/rcpp_hello_world.Rd
+++ /dev/null
@@ -1,17 +0,0 @@
-\name{rcpp_hello_world}
-\alias{rcpp_hello_world}
-\docType{package}
-\title{
-Simple function using Rcpp
-}
-\description{
-Simple function using Rcpp
-}
-\usage{
-rcpp_hello_world()
-}
-\examples{
-\dontrun{
-rcpp_hello_world()
-}
-}
diff --git a/man/run-methods.Rd b/man/run-methods.Rd
new file mode 100644
index 00000000..628ab6b9
--- /dev/null
+++ b/man/run-methods.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\name{run}
+\alias{run}
+\title{run}
+\usage{
+run(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Run the algorithm (in parallel)
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+run(gng)
+print(isRunning(gng))
+}
+
diff --git a/man/runAll.Rd b/man/runAll.Rd
new file mode 100644
index 00000000..28c47787
--- /dev/null
+++ b/man/runAll.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{runAll}
+\alias{runAll}
+\alias{runAll,Rcpp_CecModel-method}
+\title{runAll}
+\usage{
+runAll(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Starts the whole algorithm again with the same parameters
+}
+\examples{
+\dontrun{
+runAll(c)
+}
+}
+
diff --git a/man/runOneIteration.Rd b/man/runOneIteration.Rd
new file mode 100644
index 00000000..cd54f603
--- /dev/null
+++ b/man/runOneIteration.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.R
+\name{runOneIteration}
+\alias{runOneIteration}
+\alias{runOneIteration,Rcpp_CecModel-method}
+\title{runOneIteration}
+\usage{
+runOneIteration(c)
+}
+\arguments{
+\item{c}{Trained CEC model object.}
+}
+\description{
+Runs one iteration of the algorithm
+}
+\examples{
+\dontrun{
+runOneIteration(c)
+}
+}
+
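A sketch of the stepwise API above; whether further single iterations change an already-converged model is not guaranteed, so this is purely illustrative:

```{R}
library(gmum.r)

data(cec.mouse1.spherical)
c <- CEC(x = cec.mouse1.spherical, k = 3)

runAll(c)            # re-run the whole algorithm with the same parameters
runOneIteration(c)   # then advance a single iteration
energy(c)
```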
diff --git a/man/summary.cec.Rd b/man/summary.cec.Rd
new file mode 100644
index 00000000..9925c437
--- /dev/null
+++ b/man/summary.cec.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/cec.summary.R
+\docType{methods}
+\name{summary.Rcpp_CecModel}
+\alias{summary.Rcpp_CecModel}
+\title{summary}
+\usage{
+\method{summary}{Rcpp_CecModel}(object, ...)
+}
+\arguments{
+\item{object}{CEC model object.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Print detailed information about the CEC model object
+}
+\details{
+Summary of CEC
+}
+
diff --git a/man/summary.gng.Rd b/man/summary.gng.Rd
new file mode 100644
index 00000000..be5bcb91
--- /dev/null
+++ b/man/summary.gng.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.utils.R
+\docType{methods}
+\name{summary.Rcpp_GNGServer}
+\alias{summary.Rcpp_GNGServer}
+\title{summary}
+\usage{
+\method{summary}{Rcpp_GNGServer}(object, ...)
+}
+\arguments{
+\item{object}{GNG object model.}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Print basic information about the GNG object
+}
+\details{
+Summary of GNG object
+}
+
diff --git a/man/summary.svm.Rd b/man/summary.svm.Rd
new file mode 100644
index 00000000..b8749034
--- /dev/null
+++ b/man/summary.svm.Rd
@@ -0,0 +1,17 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.R
+\name{summary.Rcpp_SVMClient}
+\alias{summary.Rcpp_SVMClient}
+\title{Summary of SVM object}
+\usage{
+\method{summary}{Rcpp_SVMClient}(object, ...)
+}
+\arguments{
+\item{object}{Trained SVM object}
+
+\item{...}{other arguments not used by this method.}
+}
+\description{
+Prints a short summary of the trained model.
+}
+
diff --git a/man/svm.Rd b/man/svm.Rd
new file mode 100644
index 00000000..53c0e8ab
--- /dev/null
+++ b/man/svm.Rd
@@ -0,0 +1,137 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.R
+\name{SVM}
+\alias{SVM}
+\alias{SVM.default}
+\alias{SVM.formula}
+\title{Create SVM object}
+\usage{
+SVM(x, ...)
+
+\method{SVM}{formula}(formula, data, ...)
+
+\method{SVM}{default}(x, y, core = "libsvm", kernel = "linear",
+  prep = "none", transductive.learning = FALSE,
+  transductive.posratio = -1, C = 1, gamma = if (is.vector(x)) 1 else
+  1/ncol(x), coef0 = 0, degree = 3, class.weights = NULL,
+  example.weights = NULL, cache_size = 100, tol = 0.001, max.iter = -1,
+  verbosity = 4, class.type = "one.versus.all", svm.options = "", ...)
+}
+\arguments{
+\item{x}{Training data without labels in one of the following formats:
+\code{data.frame}, \code{data.matrix}, \code{SparseM::matrix.csr}, \code{Matrix::Matrix},
+\code{slam::simple_triplet_matrix}}
+
+\item{...}{other arguments not used by this method.}
+
+\item{formula}{Can be passed with \code{data} instead of an \code{x}, \code{y} pair;
+the formula needs to point to the labels column, for example: \code{target~.}}
+
+\item{data}{Can be passed instead of an \code{x}, \code{y} pair together with \code{formula} to mark the labels
+column, supported formats are:
+\code{data.frame}, \code{data.matrix}}
+
+\item{y}{Labels in one of the following formats: \code{factor}, \code{vector}.
+Recommended type is \code{factor}}
+
+\item{core}{Support Vector Machine library to use in training, available are:
+\code{'libsvm'}, \code{'svmlight'}; default: \code{'libsvm'}}
+
+\item{kernel}{Kernel type as string, available are: \code{'linear'}, \code{'poly'},
+\code{'rbf'}, \code{'sigmoid'};
+default: \code{'linear'}
+\itemize{
+\item \code{linear}: \eqn{x'*w}
+\item \code{poly}: \eqn{(gamma*x'*w + coef0)^{degree}}
+\item \code{rbf}: \eqn{exp(-gamma*|x-w|^2)}
+\item \code{sigmoid}: \eqn{tanh(gamma*x'*w + coef0)}
+}}
+
+\item{prep}{Preprocess method as string, available are: \code{'none'}, \code{'2e'};
+default: \code{'none'}. For more information on \code{2eSVM} see:
+\url{http://www.sciencedirect.com/science/article/pii/S0957417414004138}}
+
+\item{transductive.learning}{Option for the SVM model to deduce missing labels from the dataset,
+default: \code{FALSE}.
+NOTE: this feature is only available with the svmlight library; missing labels are marked as
+\code{'TR'}; if none are found and transductive.learning is set to \code{TRUE}, label \code{0} will be
+interpreted as missing}
+
+\item{transductive.posratio}{Fraction of unlabeled examples to be classified into the positive class
+as float from \eqn{[0,1]}, default: the ratio of positive and negative examples in the training data}
+
+\item{C}{Cost/complexity parameter, default: \code{1}}
+
+\item{gamma}{Parameter for the \code{poly}, \code{rbf} and \code{sigmoid} kernels,
+default: \code{1/n_features}}
+
+\item{coef0}{For the \code{poly} and \code{sigmoid} kernels, default: \code{0}}
+
+\item{degree}{For the \code{poly} kernel, default: \code{3}}
+
+\item{class.weights}{Named vector with a weight for each class, default: \code{NULL}}
+
+\item{example.weights}{Vector of the same length as the training data with weights for each training example,
+default: \code{NULL}. NOTE: this feature is only supported by the svmlight library}
+
+\item{cache_size}{Cache memory size in MB, default: \code{100}}
+
+\item{tol}{Tolerance of termination criterion, default: \code{1e-3}}
+
+\item{max.iter}{Depending on the library:
+\itemize{
+  \item libsvm: number of iterations after which the training process is killed
+  (it can end earlier if the desired tolerance is met), default: \code{1e6}
+  \item svmlight: number of iterations after which, if there is no progress, training is killed,
+  default: \code{-1} (no limit)
+  }}
+
+\item{verbosity}{How verbose the process should be, as an integer from \eqn{[1,6]}, default: \code{4}}
+
+\item{class.type}{Multiclass algorithm type as string,
+available are: \code{'one.versus.all', 'one.versus.one'}; default: \code{'one.versus.all'}}
+
+\item{svm.options}{allows passing all svmlight command-line arguments for more advanced options,
+for details see \url{http://svmlight.joachims.org/}}
+}
+\value{
+SVM model object
+}
+\description{
+Create and train an SVM model object.
+}
+\examples{
+\dontrun{
+# train SVM from data in x and labels in y
+svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+
+# train SVM using a dataset with both data and labels and a formula pointing to the labels
+formula <- target ~ .
+svm <- SVM(formula, data, core="svmlight", kernel="rbf", gamma=1e3)
+
+# train a model with the 2eSVM algorithm
+data(svm_breast_cancer_dataset)
+ds <- svm.breastcancer.dataset
+svm.2e <- SVM(x=ds[,-1], y=ds[,1], core="libsvm", kernel="linear", prep = "2e", C=10);
+# more at \\url{http://r.gmum.net/samples/svm.2e.html}
+
+# train SVM on a multiclass data set
+data(iris)
+# with "one vs rest" strategy
+svm.ova <- SVM(Species ~ ., data=iris, class.type="one.versus.all", verbosity=0)
+# or with "one vs one" strategy
+svm.ovo <- SVM(x=iris[,1:4], y=iris[,5], class.type="one.versus.one", verbosity=0)
+
+# we can use svmlight's sample weighting feature; suppose we have a weights vector
+# with a weight for every sample in the training data
+weighted.svm <- SVM(formula=y~., data=df, core="svmlight", kernel="rbf", C=1.0,
+                    gamma=0.5, example.weights=weights)
+
+# svmlight allows us to determine missing labels from a dataset
+# suppose we have labels y with missing labels marked as zeros
+svm.transduction <- SVM(x, y, transductive.learning=TRUE, core="svmlight")
+
+# for more in-depth examples visit \\url{http://r.gmum.net/getting_started.html}
+}
+}
+
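A compact, runnable version of the multiclass workflow from the examples above, scored with svm.accuracy (documented next); training-set accuracy on iris is used purely for illustration:

```{R}
library(gmum.r)

data(iris)
svm <- SVM(Species ~ ., data = iris, class.type = "one.versus.all", verbosity = 0)
pred <- predict(svm, iris[, 1:4])
svm.accuracy(pred, iris$Species)
```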
diff --git a/man/svm.accuracy.Rd b/man/svm.accuracy.Rd
new file mode 100644
index 00000000..6b397ea9
--- /dev/null
+++ b/man/svm.accuracy.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/svm.utils.R
+\name{svm.accuracy}
+\alias{svm.accuracy}
+\title{Measure accuracy score of a prediction}
+\usage{
+svm.accuracy(prediction, target)
+}
+\arguments{
+\item{prediction}{factor or 1 dim vector with predicted classes}
+
+\item{target}{factor or 1 dim vector with true classes}
+}
+\description{
+Calculates the accuracy of a prediction; returns the percent of correctly predicted examples
+over all test examples.
+}
+\examples{
+\dontrun{
+# first, the SVM model needs to be trained
+svm <- SVM(x, y, core="libsvm", kernel="linear", C=1)
+# then we can use it to predict unknown samples
+p <- predict(svm, x_test)
+acc <- svm.accuracy(p, y)
+}
+}
+
diff --git a/man/svm.breastcancer.dataset.Rd b/man/svm.breastcancer.dataset.Rd
new file mode 100644
index 00000000..a0851367
--- /dev/null
+++ b/man/svm.breastcancer.dataset.Rd
@@ -0,0 +1,11 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/misc.R
+\docType{data}
+\name{svm.breastcancer.dataset}
+\alias{svm.breastcancer.dataset}
+\title{svm.breastcancer.dataset}
+\description{
+UCI breast cancer dataset
+}
+\keyword{data}
+
diff --git a/man/svm.transduction.Rd b/man/svm.transduction.Rd
new file mode 100644
index 00000000..d872954c
--- /dev/null
+++ b/man/svm.transduction.Rd
@@ -0,0 +1,11 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/misc.R
+\docType{data}
+\name{svm.transduction}
+\alias{svm.transduction}
+\title{svm.transduction}
+\description{
+Dataset used in the transduction demo on the website
+}
+\keyword{data}
+
diff --git a/man/terminate-methods.Rd b/man/terminate-methods.Rd
new file mode 100644
index 00000000..63a94c03
--- /dev/null
+++ b/man/terminate-methods.Rd
@@ -0,0 +1,20 @@
+% Generated by roxygen2 (4.1.1): do not edit by hand
+% Please edit documentation in R/gng.R
+\docType{methods}
+\name{terminate}
+\alias{terminate}
+\title{terminate}
+\usage{
+terminate(object)
+}
+\arguments{
+\item{object}{GNG object}
+}
+\description{
+Terminate the algorithm
+}
+\examples{
+gng <- GNG(gng.preset.sphere(100))
+terminate(gng)
+}
+
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
new file mode 100644
index 00000000..f4e7be8c
--- /dev/null
+++ b/src/CMakeLists.txt
@@ -0,0 +1,21 @@
+cmake_minimum_required(VERSION 3.0)
+project(gmum.r)
+
+file(GLOB GMUMR_SOURCES
+    "${SRC_DIR}/cec/*.cpp" "${SRC_DIR}/cec/*.c"
+    "${SRC_DIR}/svm/*.cpp" "${SRC_DIR}/svm/*.c"
+    "${SRC_DIR}/gng/*.cpp" "${SRC_DIR}/gng/*.c"
+    "${SRC_DIR}/svmlight/*.cpp" "${SRC_DIR}/svmlight/*.c"
+    "${SRC_DIR}/utils/*.cpp" "${SRC_DIR}/utils/*.c")
+
+set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${R_LD_FLAGS}")
+set(CMAKE_CXX_FLAGS "${R_CXX_FLAGS} -std=c++0x")
+
+if(APPLE)
+    set(CMAKE_SHARED_LIBRARY_SUFFIX ".so")
+endif()
+
+add_library(gmum.r SHARED ${GMUMR_SOURCES})
+target_link_libraries(gmum.r ${R_LIBS})
+set_target_properties(gmum.r PROPERTIES PREFIX "")
+INSTALL(TARGETS gmum.r DESTINATION ${SRC_DIR})
diff --git a/src/Makevars b/src/Makevars
old mode 100755
new mode 100644
index c880859a..ebe2dd7f
--- a/src/Makevars
+++ b/src/Makevars
@@ -1,27 +1,23 @@
-## Use the R_HOME indirection to support installations of multiple R version
-PKG_LIBS = `$(R_HOME)/bin/Rscript -e "Rcpp:::LdFlags()"`
+# NOTE: R overwrites all optimization flags; if you are a dev you can change flags in ~/.R/Makevars
+# NOTE: R packages should use a POSIX Makefile
-## As an alternative, one can also add this code in a file 'configure'
-##
-## PKG_LIBS=`${R_HOME}/bin/Rscript -e "Rcpp:::LdFlags()"`
-##
-## sed -e "s|@PKG_LIBS@|${PKG_LIBS}|" \
-##     src/Makevars.in > src/Makevars
-##
-## which together with the following file 'src/Makevars.in'
-##
-## PKG_LIBS = @PKG_LIBS@
-##
-## can be used to create src/Makevars dynamically. This scheme is more
-## powerful and can be expanded to also check for and link with other
-## libraries. It should be complemented by a file 'cleanup'
-##
-## rm src/Makevars
-##
-## Of course, autoconf can also be used to write configure files. This is
-## done by a number of packages, but recommended only for more advanced users
-## comfortable with autoconf and its related tools.
+CPP_SOURCES = svm/libsvm_runner.cpp svm/svm_client.cpp svm/svmlight_runner.cpp svm/svm_wrapper_module.cpp svm/two_e_svm_pre.cpp svm/svm_basic.cpp svm/svm.cpp svm/svm_utils.cpp svm/two_e_svm_post.cpp svm/validator_runner.cpp cec/assignment.cpp cec/cec.cpp cec/centroids_assignment.cpp cec/cluster_custom_function.cpp cec/kmeanspp_assignment.cpp cec/cec_configuration.cpp cec/cec_module.cpp cec/cluster.cpp cec/hartigan.cpp cec/random_assignment.cpp utils/threading.cpp utils/tinythread.cpp utils/utils.cpp gng/gng_algorithm.cpp gng/gng_configuration.cpp gng/gng_module.cpp gng/gng_server.cpp
+SOURCES = $(CPP_SOURCES) svmlight/svm_common.c svmlight/svm_hideo.c svmlight/svm_learn.c
+INCLUDES = -I ../inst/include/svmlight -I ../inst/include/utils -I ../inst/include -I ../inst/include/cec -I ../inst/include/gng -I ../inst/include/svm
+SVMLIGHT_OBJECTS = svmlight/svm_common.o svmlight/svm_hideo.o svmlight/svm_learn.o
+COMPILED_LIBRARIES_OBJECTS = $(SVMLIGHT_OBJECTS)
+OBJECTS = $(CPP_SOURCES:.cpp=.o) $(COMPILED_LIBRARIES_OBJECTS)
+
+DEBUG = -DNDEBUG_GMUM
+PREPROCESS = $(DEBUG) -DRCPP_INTERFACE -DARMA_64BIT_WORD -DBOOST_DISABLE_ASSERTS
+
+R_LIBS = `$(R_HOME)/bin/R CMD config --ldflags`
+LAPACK_LIBS = `$(R_HOME)/bin/R CMD config LAPACK_LIBS`
+BLAS_LIBS = `$(R_HOME)/bin/R CMD config BLAS_LIBS`
+
+# Do not delete R_CPPFLAGS, it will break tests
+PKG_CPPFLAGS = $(GCC_STD) $(PREPROCESS) $(R_CPPFLAGS) $(INCLUDES) -pthread
+PKG_CXXFLAGS = --std=c++0x
+PKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(R_LIBS) $(FLIBS)
diff --git a/src/Makevars.win b/src/Makevars.win
old mode 100755
new mode 100644
index 18d49b41..b5932a24
--- a/src/Makevars.win
+++ b/src/Makevars.win
@@ -1,3 +1,21 @@
+# NOTE: R overwrites all optimization flags; if you are a dev you can change flags in ~/.R/Makevars
-## Use the R_HOME indirection to support installations of multiple R version
-PKG_LIBS = $(shell "${R_HOME}/bin${R_ARCH_BIN}/Rscript.exe" -e "Rcpp:::LdFlags()")
+CPP_SOURCES := $(wildcard svm/*.cpp cec/*.cpp utils/*.cpp gng/*.cpp)
+SOURCES := $(CPP_SOURCES) $(wildcard svmlight/*.c)
+INCLUDES := -I ../inst/include/svmlight -I ../inst/include/utils -I ../inst/include -I ../inst/include/cec -I ../inst/include/gng -I ../inst/include/svm
+
+SVMLIGHT_OBJECTS = svmlight/svm_common.o svmlight/svm_hideo.o svmlight/svm_learn.o
+COMPILED_LIBRARIES_OBJECTS = $(SVMLIGHT_OBJECTS)
+OBJECTS = $(CPP_SOURCES:.cpp=.o) $(COMPILED_LIBRARIES_OBJECTS)
+
+DEBUG := -DNDEBUG_GMUM
+PREPROCESS := $(DEBUG) -DARMA_DONT_USE_CXX11 -DRCPP_INTERFACE -DARMA_64BIT_WORD
+
+R_LIBS := $(shell R CMD config --ldflags) $(shell echo 'Rcpp:::LdFlags()' | R --vanilla --slave)
+R_CPPFLAGS := $(shell R CMD config --cppflags) $(shell echo 'Rcpp:::CxxFlags()' | R --vanilla --slave) $(shell echo 'RcppArmadillo:::CxxFlags()' | R --vanilla --slave)
+LAPACK_LIBS := $(shell R CMD config LAPACK_LIBS)
+BLAS_LIBS := $(shell R CMD config BLAS_LIBS)
+
+PKG_CPPFLAGS := $(PKG_CPPFLAGS) $(GCC_STD) $(PREPROCESS) $(R_CPPFLAGS) $(INCLUDES) -mtune=native -static-libstdc++ -static-libgcc -lstdc++ -lpthread
+PKG_CXXFLAGS := --std=c++0x
+PKG_LIBS := $(PKG_LIBS) $(LAPACK_LIBS) $(BLAS_LIBS) $(R_LIBS) $(FLIBS) -static-libstdc++ -static-libgcc -lstdc++ -lpthread
diff --git a/src/README.md b/src/README.md
new file mode 100644
index 00000000..b64f4687
--- /dev/null
+++ b/src/README.md
@@ -0,0 +1,73 @@
+# Manual latest version installation
+
+## Linux/MacOS
+
+Installation should be straightforward.
+
+Specify a commit, tag, or branch name using the **ref** argument. Defaults to "master".
+
+Run in R:
+
+```{R}
+devtools::install_github("gmum/gmum.r", ref="dev")
+```
+
+Then load the package for use:
+```{R}
+library(gmum.r)
+```
+
+## Compile with CMake
+
+Go to the project root directory and run the following commands
+
+```{shell}
+mkdir build
+cd build
+cmake ..
+make
+make install
+cd ..
+```
+
+Now you can use the library from R
+
+```{R}
+library(devtools)
+devtools::load_all()
+...
+```
+
+## Windows
+
+Please download the appropriate version of [Rtools](http://cran.r-project.org/bin/windows/Rtools/).
+
+Specify a commit, tag, or branch name using the **ref** argument. Defaults to "master".
+
+Run the R version matching your CPU architecture (x64 vs x86)
+
+```{R}
+devtools::install_github("gmum/gmum.r", ref="dev", args="--no-lock --no-multiarch")
+```
+
+To build both x64 and x86 run:
+
+```{R}
+devtools::install_github("gmum/gmum.r", ref="dev", args="--no-lock --preclean")
+```
+
+Then load the package for use:
+```{R}
+library(gmum.r)
+```
+
+### Known issues:
+
+* If you have non-English letters in your home path, please consider installing from an administrator cmd.
+
+```{shell}
+R CMD INSTALL . --no-lock --no-multiarch --preclean --library=
+```
+
+where the path passed to `--library=` might be `C:\Program Files\R\R-3.1.2\library`.
+
diff --git a/src/cec/assignment.cpp b/src/cec/assignment.cpp
new file mode 100644
index 00000000..29bbd336
--- /dev/null
+++ b/src/cec/assignment.cpp
@@ -0,0 +1,77 @@
+#include "assignment.hpp"
+
+namespace gmum {
+
+Assignment::Assignment(const arma::mat &points, const unsigned int nclusters)
+    : m_points(points), m_nclusters(nclusters), m_seed(0)
+{ }
+
+Assignment::~Assignment() { }
+
+void Assignment::set_seed(int seed)
+{
+    m_seed = seed;
+}
+
+unsigned int find_nearest(unsigned int i,
+        const std::vector<unsigned int> &centers, const arma::mat &points) {
+
+    arma::rowvec point = points.row(i);
+    float distance = std::numeric_limits<float>::max();
+    unsigned int nearest = -1;
+
+    for (unsigned int i = 0; i < centers.size(); ++i) {
+
+        arma::rowvec vec = points.row(centers[i]) - point;
+        float temp_dist = arma::as_scalar(vec * vec.t());
+
+        if (distance > temp_dist) {
+            distance = temp_dist;
+            nearest = i;
+        }
+
+    }
+    return nearest;
+}
+
+unsigned int find_nearest(unsigned int i,
+        const std::list<std::vector<double> > &centers,
+        const arma::mat &points) {
+
+    arma::rowvec point = points.row(i);
+    float distance = std::numeric_limits<float>::max();
+    unsigned int nearest = -1;
+    unsigned int j = 0;
+
+    BOOST_FOREACH(std::vector<double> center, centers) {
+
+        arma::rowvec centr = arma::conv_to<arma::rowvec>::from(center);
+        arma::rowvec vec = centr - point;
+        float tempDist = arma::as_scalar(vec*vec.t());
+        if(distance > tempDist) {
+            distance = tempDist;
+            nearest = j;
+        }
+        j++;
+
+    }
+
+    return nearest;
+}
+
+void assign_points(std::vector<unsigned int> &assignment,
+        const std::vector<unsigned int> &centers, const arma::mat &points) {
+
+    for (unsigned int i = 0; i < assignment.size(); ++i)
+        assignment[i] = find_nearest(i, centers, points);
+}
+
+void assign_points(std::vector<unsigned int> &assignment,
+        const std::list<std::vector<double> > &centers,
+        const arma::mat &points) {
+
+    for (unsigned int i = 0; i < assignment.size(); ++i)
+        assignment[i] = find_nearest(i, centers, points);
+}
+
+}
diff --git a/src/cec/cec.cpp b/src/cec/cec.cpp
new file mode 100644
index 00000000..def1f796
--- /dev/null
+++ b/src/cec/cec.cpp
@@ -0,0 +1,314 @@
+#include "cec.hpp"
+#include "hartigan.hpp"
+#include "assignment.hpp"
+#include "kmeanspp_assignment.hpp"
+#include "centroids_assignment.hpp"
+#include "random_assignment.hpp"
+
+
+// Added because MinGW does not define these constants. Don't remove!
+#define M_E 2.71828182845904523536
+#define M_PI 3.14159265358979323846
+
+
+using namespace gmum;
+
+void CecModel::clear_clusters() {
+    if (m_clusters.empty()) {
+        return;
+    }
+    for (std::vector<Cluster*>::iterator it = m_clusters.begin();
+            it != m_clusters.end(); ++it) {
+        delete *it;
+    }
+    m_clusters.clear();
+}
+
+Cluster* CecModel::create_cluster(ClusterParams* cluster_params, int i) {
+    Params& params = m_config->get_params();
+    Cluster * cluster = 0;
+    switch (cluster_params->type) {
+    case kno_type: // TODO: handle knoType parameter
+    case kmix: // TODO: handle kmix parameter
+        break;
+    case kstandard:
+        cluster = new ClusterStandard(i, m_assignment, params.dataset);
+        break;
+    case kfixed_covariance: {
+        ClusterFixedCovarianceParams *ptr = dynamic_cast<ClusterFixedCovarianceParams*>(cluster_params);
+        cluster = new ClusterFixedCovariance(ptr->cov_mat, i, m_assignment, params.dataset);
+        break;
+    }
+    case kdiagonal:
+        cluster = new ClusterDiagonal(i, m_assignment, params.dataset);
+        break;
+    case kspherical:
+        cluster = new ClusterSpherical(i, m_assignment, params.dataset);
+        break;
+    case kfixed_spherical: {
+        ClusterSphericalFixedRParams * ptr = dynamic_cast<ClusterSphericalFixedRParams*>(cluster_params);
+        cluster = new ClusterSphericalFixedR(ptr->radius, i, m_assignment, params.dataset);
+        break;
+    }
+#ifdef RCPP_INTERFACE
+    case kcustom: {
+        ClusterCustomParams* ptr = dynamic_cast<ClusterCustomParams*>(cluster_params);
+        cluster = new ClusterCustomFunction(i, m_assignment, params.dataset, ptr->function);
+        break;
+    }
+#endif
+    }
+    return cluster;
+}
+
+CecModel::~CecModel() {
+    clear_clusters();
+}
+
+CecModel::CecModel(CecConfiguration *cfg) :
+        m_config(cfg) {
+    find_best_cec();
+}
+CecModel::CecModel(CecModel &other) :
+        m_config(0) {
+    *this = other;
+}
+
+CecModel& CecModel::operator=(CecModel& other) {
+    if (this != &other) {
+        m_result = other.m_result;
+        m_assignment = other.m_assignment;
+        m_inv_set = other.m_inv_set;
+        m_inv = other.m_inv;
+        m_config = other.m_config;
+        clear_clusters();
+        for (std::vector<Cluster*>::iterator it = other.m_clusters.begin();
+                it != other.m_clusters.end(); ++it) {
+            m_clusters.push_back((*it)->clone());
+        }
+    }
+    return *this;
+}
+
+void CecModel::init_clusters(std::vector<unsigned int>& assignment) {
+    Params& params = m_config->get_params();
+    m_assignment = assignment;
+    clear_clusters();
+    m_clusters.reserve(params.nclusters);
+
+    int i = 0;
+    if (params.cluster_type == kmix) {
+        for(std::list<boost::shared_ptr<ClusterParams> >::iterator it = params.clusters.begin(); it != params.clusters.end(); ++it)
+        {
+            m_clusters.push_back(create_cluster(it->get(), i));
+            ++i;
+        }
+    } else {
+        //TODO: why pointer?
+        boost::scoped_ptr<ClusterParams> cluster;
+        switch (params.cluster_type) {
+        case kfixed_spherical:
+            cluster.reset(new ClusterSphericalFixedRParams(params.radius));
+            break;
+        case kfixed_covariance:
+            cluster.reset(new ClusterFixedCovarianceParams(params.cov_mat));
+            break;
+#ifdef RCPP_INTERFACE
+        case kcustom:
+            cluster.reset(new ClusterCustomParams(params.function));
+            break;
+#endif
+        default:
+            /*case standard:
+             case diagonal:
+             case spherical:*/
+            cluster.reset(new ClusterParams(params.cluster_type));
+            break;
+        }
+        for (unsigned int i = 0; i < params.nclusters; ++i)
+        {
+            m_clusters.push_back(create_cluster(cluster.get(), i));
+        }
+    }
+}
+
+void CecModel::find_best_cec() {
+    std::vector<unsigned int> assignment;
+    Params& params = m_config->get_params();
+    boost::scoped_ptr<Assignment> assignment_type;
+
+    switch (params.assignment_type) {
+    case krandom:
+        assignment_type.reset(new RandomAssignment(params.dataset, params.nclusters));
+        break;
+    case kkmeanspp:
+        assignment_type.reset(new KmeansppAssignment(params.dataset, params.nclusters));
+        break;
+    case kcentroids:
+        assignment_type.reset(new CentroidsAssignment(params.dataset, params.nclusters, params.centroids));
+        break;
+    }
+    assignment_type->set_seed(params.seed);
+
+    assignment.resize(params.dataset.n_rows);
+    (*assignment_type)(assignment);
+
+    init_clusters(assignment);
+
+    if(params.it_max == 0) return;
+
+    try {
+        loop();
+        CecModel best_cec = *this;
+        for (unsigned int i = 1; i < params.nstart; ++i) {
+            (*assignment_type)(assignment);
+            init_clusters(assignment);
+            loop();
+
+            if(!std::isnormal(m_result.energy))
+            {
+                continue;
+            }
+
+            if (m_result.energy < best_cec.get_result().energy) {
+                best_cec = *this;
+            }
+        }
+        *this = best_cec;
+        if(!std::isnormal(m_result.energy))
+        {
You should try running CEC with other parameters") + } + } catch (std::exception &e) { +#ifdef RCPP_INTERFACE + Rcpp::stop(std::string("exception ") + e.what() + " caught in CEC_new"); +#else + std::cerr << std::string("exception ") + e.what() + " caught in CEC_new" + << std::endl; + exit(1); +#endif + } +} + +void CecModel::loop() { + Params& params = m_config->get_params(); + m_result = params.algorithm->loop(params.dataset, m_assignment, params.kill_threshold, m_clusters); + int nclusters = m_clusters.size(); + m_inv_set.resize(nclusters, false); + m_inv.resize(nclusters); +} + +void CecModel::single_loop() { + Params& params = m_config->get_params(); + SingleResult sr = params.algorithm->single_loop(params.dataset, m_assignment, params.kill_threshold, m_clusters); + m_result.append(sr, params.log_nclusters, params.log_energy); +} + +double CecModel::entropy() { + double s = 0.0; + + BOOST_FOREACH(Cluster * cluster, m_clusters) + { + s += cluster->entropy(); + } + return s; +} + +std::vector<unsigned int> CecModel::get_assignment() const { + return m_assignment; +} + +void CecModel::set_assignment(std::vector<unsigned int>& assignment) { + //TODO: handle set assignment + m_assignment = assignment; +} + +arma::mat CecModel::get_points() { + return m_config->get_params().dataset; +} + +std::vector<arma::rowvec> CecModel::centers() const { + std::vector < arma::rowvec > array; + array.reserve(m_clusters.size()); + for (unsigned int i = 0; i < m_clusters.size(); ++i) { + array.push_back(m_clusters[i]->get_mean()); + } + return array; +} + +std::vector<arma::mat> CecModel::cov() const { + Params& params = m_config->get_params(); + std::vector < arma::mat > array; + array.reserve(m_clusters.size()); + + for (unsigned int i = 0; i < m_clusters.size(); ++i) { + array.push_back(m_clusters[i]->get_cov_mat(i, m_assignment, params.dataset)); + } + + return array; +} + +unsigned int CecModel::iters() const { + return m_result.iterations; +} + +std::list<unsigned int> CecModel::get_nclusters() const { + Params& params = m_config->get_params(); + if(!params.log_nclusters) + { + GMUM_ERROR("log.nclusters is available only if you set log.ncluster=TRUE"); + } + return m_result.nclusters; +} + +std::list<double> CecModel::get_energy_history() const { + Params& params = m_config->get_params(); + if(!params.log_energy) + { + GMUM_ERROR("log.energy is available only if you set log.energy=TRUE"); + } + return m_result.energy_history; +} + +double CecModel::get_energy() const { + return m_result.energy; +} + +unsigned int CecModel::predict(std::vector<double> vec) { + Params& params = m_config->get_params(); + arma::rowvec x = arma::conv_to < arma::rowvec > ::from(vec); + + unsigned int assign = 0; + double result = std::numeric_limits<double>::min(); + for (unsigned int i = 0; i < m_clusters.size(); ++i) { + arma::mat cov_mat = m_clusters[i]->get_cov_mat(i, m_assignment, params.dataset); + arma::rowvec mean = m_clusters[i]->get_mean(); + if (!m_inv_set[i]) { + arma::mat Q, R; + arma::qr_econ(Q, R, cov_mat); + int dim = mean.n_cols; + arma::mat Id = arma::eye(dim, dim); + m_inv[i] = solve(R, Id) * Q.t(); + m_inv_set[i] = true; + } + + double const_multiplier = pow(2 * M_PI, -0.5 * x.n_cols) + * pow(arma::det(cov_mat), -0.5); + double scalar = arma::as_scalar( + (x - mean) * m_inv[i] * ((x - mean).t())); + double exponens = exp(-0.5 * scalar); + double p = m_clusters[i]->size() / (double) params.dataset.n_rows; + double tmp = p * const_multiplier * exponens; // posterior weight: prior p times the Gaussian density + + if (tmp > result) { + result = tmp; + assign = i; + } + } + + return assign; +} + +const gmum::TotalResult&
CecModel::get_result() const { + return m_result; +} diff --git a/src/cec/cec_configuration.cpp b/src/cec/cec_configuration.cpp new file mode 100644 index 00000000..8f6b4584 --- /dev/null +++ b/src/cec/cec_configuration.cpp @@ -0,0 +1,321 @@ +#include "cec_configuration.hpp" +#include "assignment.hpp" +#include "const.hpp" +#include +#include +#include +#include +#include + +using namespace gmum; + +CecConfiguration::CecConfiguration() +{ +} + +Params &CecConfiguration::get_params() +{ + return this->m_params; +} + +void CecConfiguration::set_params(Params params) +{ + this->m_params = params; +} + +#ifdef RCPP_INTERFACE +void CecConfiguration::set_data_set(const Rcpp::NumericMatrix proxy_dataset) +{ + m_params.dataset = arma::mat(proxy_dataset.begin(), proxy_dataset.nrow(), proxy_dataset.ncol()); +} +void CecConfiguration::set_cov(const Rcpp::NumericMatrix cov_mat_proxy) +{ + //TODO better check is empty matrix + if (!Rf_isNull(cov_mat_proxy) && m_params.clusters.empty()) + { + m_params.cov_mat_set = true; + m_params.cov_mat = arma::mat(cov_mat_proxy.begin(), cov_mat_proxy.nrow(), + cov_mat_proxy.ncol()); + } else { + m_params.cov_mat_set = false; + } +} + +void CecConfiguration::set_mix_handle_fixed_covariance_cluster(Rcpp::List &list) +{ + if (list.containsElementNamed(CONST::CLUSTERS::cov_mat)) + { + Rcpp::NumericMatrix temp = Rcpp::as < Rcpp::NumericMatrix > (list[CONST::CLUSTERS::cov_mat]); + m_params.clusters.push_back(boost::make_shared(arma::mat(temp.begin(), temp.nrow(), temp.ncol()))); + } else { + GMUM_ERROR(CONST::ERRORS::cov_mat_req); + } +} + +void CecConfiguration::set_mix_handle_fixed_spherical_cluster(Rcpp::List &list) +{ + if (list.containsElementNamed(CONST::CLUSTERS::radius)) + { + double radius = Rcpp::as(list[CONST::CLUSTERS::radius]); + m_params.clusters.push_back(boost::make_shared(radius)); + } else { + m_params.clusters.push_back(boost::make_shared()); + } +} + +void CecConfiguration::set_mix_handle_custom_cluster(Rcpp::List &list) +{ + if (list.containsElementNamed(CONST::CLUSTERS::function_name)) + { + Rcpp::Function func = Rcpp::as(list[CONST::CLUSTERS::function_name]); + m_params.clusters.push_back(boost::make_shared(boost::make_shared(func))); + } else { + GMUM_ERROR(CONST::ERRORS::function_name_req); + } +} + +void CecConfiguration::set_mix_handle_standard_cluster(Rcpp::List &list) +{ + m_params.clusters.push_back(boost::make_shared(kstandard)); +} + +void CecConfiguration::set_mix_handle_spherical_cluster(Rcpp::List &list) +{ + m_params.clusters.push_back(boost::make_shared(kspherical)); +} + +void CecConfiguration::set_mix_handle_diagonal_cluster(Rcpp::List &list) +{ + m_params.clusters.push_back(boost::make_shared(kdiagonal)); +} + +void CecConfiguration::set_mix(const Rcpp::List clusters) +{ + if (Rf_isNull(clusters)) { return; } + m_params.nclusters = 0; + + Rcpp::List desc = Rcpp::as < Rcpp::List > (clusters); + for (Rcpp::List::iterator it = desc.begin(); it != desc.end(); ++it) + { + Rcpp::List list = Rcpp::as < Rcpp::List > (*it); + if (!list.containsElementNamed(CONST::CLUSTERS::type)) + { + GMUM_ERROR("method.type not defined"); + } + + std::string typeStr = Rcpp::as < std::string > (list[CONST::CLUSTERS::type]); + + int count = 1; + if(list.containsElementNamed(CONST::nclusters)) + { + count = Rcpp::as(list[CONST::nclusters]); + if(count <= 0) + { + GMUM_ERROR("k must be greater or equal 1"); + } + } + m_params.nclusters += count; + + for(int i = 0; i < count; ++i) + { + if (typeStr.compare(CONST::CLUSTERS::standard) == 0) + { + 
set_mix_handle_standard_cluster(list); + } else if (typeStr.compare(CONST::CLUSTERS::fixed_covariance) == 0) { + set_mix_handle_fixed_covariance_cluster(list); + } else if (typeStr.compare(CONST::CLUSTERS::fixed_spherical) == 0) { + set_mix_handle_fixed_spherical_cluster(list); + } else if (typeStr.compare(CONST::CLUSTERS::spherical) == 0) { + set_mix_handle_spherical_cluster(list); + } else if (typeStr.compare(CONST::CLUSTERS::diagonal) == 0) { + set_mix_handle_diagonal_cluster(list); + } else if (typeStr.compare(CONST::CLUSTERS::custom) == 0) { + set_mix_handle_custom_cluster(list); + } else { + GMUM_ERROR(typeStr + ": unknown method.type"); + } + } + } + + if(m_params.nclusters <= 0) + { + GMUM_ERROR("method type mix require at least one cluster parameter (params.mix)"); + } +} + +void CecConfiguration::set_centroids(const Rcpp::List centroids) { + if (!Rf_isNull(centroids)) { + Rcpp::List desc = Rcpp::as < Rcpp::List > (centroids); + for (Rcpp::List::iterator it = desc.begin(); it != desc.end(); ++it) + m_params.centroids.push_back(Rcpp::as < std::vector > (*it)); + m_params.centroids_set = true; + } else + m_params.centroids_set = false; +} +#endif + +void CecConfiguration::set_eps(const double kill_threshold) { + m_params.kill_threshold = kill_threshold; +} + +void CecConfiguration::set_nclusters(const unsigned int nclusters) { + if (nclusters != 0) + m_params.nclusters = nclusters; + else if (m_params.clusters.size() > 0) + m_params.nclusters = m_params.clusters.size(); + else + m_params.nclusters = CONST::nclusters_init; + + if (m_params.dataset.n_rows < m_params.nclusters) + GMUM_ERROR(CONST::ERRORS::dataset_size); +} + +void CecConfiguration::set_log_energy(bool log_energy) { + m_params.log_energy = log_energy; +} + +void CecConfiguration::set_log_cluster(bool log_nclusters) { + m_params.log_nclusters = log_nclusters; +} + +void CecConfiguration::set_nstart(const unsigned int nstart) { + m_params.nstart = nstart; +} + +void CecConfiguration::set_method_init(const std::string init) { + m_params.assignment_type = CONST::default_assignment; + if (init.compare(CONST::CLUSTERS::random) == 0) + m_params.assignment_type = krandom; + else if (init.compare(CONST::CLUSTERS::kmeanspp) == 0) + m_params.assignment_type = kkmeanspp; + else if (init.compare(CONST::CLUSTERS::centroids) == 0) + m_params.assignment_type = kcentroids; + else + GMUM_ERROR(CONST::ERRORS::assignment_error); + + if (m_params.assignment_type == kcentroids + && m_params.centroids.size() != m_params.nclusters) + GMUM_ERROR(CONST::ERRORS::centroids_error); + + switch (m_params.cluster_type) { + case kfixed_covariance: + { + if (!m_params.cov_mat_set) GMUM_ERROR(CONST::ERRORS::cov_mat_req); + break; + } + case kfixed_spherical: + { + if (!m_params.radius_set) GMUM_ERROR(CONST::ERRORS::radius_req); + break; + } + case kno_type: + { + GMUM_ERROR(CONST::ERRORS::cluster_rec_error); + break; + } +#ifdef RCPP_INTERFACE + case kcustom: + { + if (!m_params.function) GMUM_ERROR(CONST::ERRORS::function_name_req); + break; + } +#endif + case kmix: + { + BOOST_FOREACH(boost::shared_ptr < ClusterParams > cluster, + m_params.clusters) + { + switch (cluster->type) { + case kfixed_covariance: + { + ClusterFixedCovarianceParams &ptr = + static_cast(*cluster); + if (!ptr.cov_mat_set) + GMUM_ERROR(CONST::ERRORS::cov_mat_req); + break; + } + case kfixed_spherical: + { + ClusterSphericalFixedRParams &ptr = + static_cast(*cluster); + if (!ptr.radius_set) + GMUM_ERROR(CONST::ERRORS::radius_req); + break; + } + case kno_type: + { + 
GMUM_ERROR(CONST::ERRORS::cluster_rec_error); + break; + } +#ifdef RCPP_INTERFACE + case kcustom: + { + ClusterCustomParams &ptr = + static_cast(*cluster); + if (!ptr.function) + GMUM_ERROR(CONST::ERRORS::function_name_req); + break; + } +#endif + default: + break; + } + } + break; + } + default: + break; + } +} + +void CecConfiguration::set_method_type(const std::string type) { + if (m_params.clusters.size() > 0) + m_params.cluster_type = kmix; + else { + if (type.compare(CONST::CLUSTERS::standard) == 0) { + m_params.cluster_type = kstandard; + } else if (type.compare(CONST::CLUSTERS::fixed_covariance) == 0) { + m_params.cluster_type = kfixed_covariance; + } else if (type.compare(CONST::CLUSTERS::diagonal) == 0) { + m_params.cluster_type = kdiagonal; + } else if (type.compare(CONST::CLUSTERS::spherical) == 0) { + m_params.cluster_type = kspherical; + } else if (type.compare(CONST::CLUSTERS::fixed_spherical) == 0) { + m_params.cluster_type = kfixed_spherical; + } else if (type.compare(CONST::CLUSTERS::custom) == 0) { + m_params.cluster_type = kcustom; + } else{ + GMUM_ERROR(CONST::ERRORS::cluster_rec_error); + } + } +} + +void CecConfiguration::set_r(const double radius) { + if (radius != 0 && m_params.clusters.empty()) { + m_params.radius_set = true; + m_params.radius = radius; + } else + m_params.radius_set = false; +} + +#ifdef RCPP_INTERFACE +void CecConfiguration::set_function(Rcpp::Function function) { + if (m_params.clusters.empty()) { + m_params.function = boost::make_shared(function); + } +} +#endif + +void CecConfiguration::set_it_max(int it_max) { + m_params.it_max = it_max; +} + +void CecConfiguration::set_algorithm(const std::string algorithm) +{ + m_params.algorithm.reset(new Hartigan(m_params.log_nclusters, m_params.log_energy, m_params.it_max)); +} + +void CecConfiguration::set_seed(int seed) +{ + m_params.seed = seed; +} diff --git a/src/cec/cec_module.cpp b/src/cec/cec_module.cpp new file mode 100644 index 00000000..2a519818 --- /dev/null +++ b/src/cec/cec_module.cpp @@ -0,0 +1,56 @@ +#ifndef CEC_MODULE_H +#define CEC_MODULE_H + +#ifdef RCPP_INTERFACE + +#include +using namespace Rcpp; + +class CecConfiguration; +class CecModel; + +RCPP_EXPOSED_CLASS (CecConfiguration) +RCPP_EXPOSED_CLASS (CecModel) + +#include "cec_configuration.hpp" +#include "cec.hpp" +using namespace gmum; + +RCPP_MODULE(cec) { + class_("CecConfiguration") + .constructor() + .method(".setDataSet", &CecConfiguration::set_data_set) + .method(".setEps", &CecConfiguration::set_eps) + .method(".setMix", &CecConfiguration::set_mix) + .method(".setNrOfClusters", &CecConfiguration::set_nclusters) + .method(".setLogEnergy", &CecConfiguration::set_log_energy) + .method(".setLogCluster", &CecConfiguration::set_log_cluster) + .method(".setNstart", &CecConfiguration::set_nstart) + .method(".setCentroids", &CecConfiguration::set_centroids) + .method(".setMethodInit", &CecConfiguration::set_method_init) + .method(".setMethodType", &CecConfiguration::set_method_type) + .method(".setCov", &CecConfiguration::set_cov) + .method(".setR", &CecConfiguration::set_r) + .method(".setFunction", &CecConfiguration::set_function) + .method(".setItmax", &CecConfiguration::set_it_max) + .method(".setAlgorithm", &CecConfiguration::set_algorithm) + .method(".setSeed", &CecConfiguration::set_seed); + + class_("CecModel") + .constructor() + .method(".runAll", &CecModel::loop) + .method(".runOneIteration", &CecModel::single_loop) + .method(".getEnergy", &CecModel::get_energy) + .method(".getClustering", &CecModel::get_assignment) + 
.method(".getCenters", &CecModel::centers) + .method(".getCovMatrix", &CecModel::cov) + .method(".predict", &CecModel::predict) + .method(".getLogNumberOfClusters", &CecModel::get_nclusters) + .method(".getLogEnergy", &CecModel::get_energy_history) + .method(".getIterations", &CecModel::iters) + .method(".getDataset", &CecModel::get_points); +} + +#endif + +#endif diff --git a/src/cec/centroids_assignment.cpp b/src/cec/centroids_assignment.cpp new file mode 100644 index 00000000..e4a41e30 --- /dev/null +++ b/src/cec/centroids_assignment.cpp @@ -0,0 +1,9 @@ +#include "centroids_assignment.hpp" + +namespace gmum { + +void CentroidsAssignment::operator()(std::vector &assignment) { + assign_points(assignment, m_centroids, m_points); +} + +} diff --git a/src/cec/cluster.cpp b/src/cec/cluster.cpp new file mode 100644 index 00000000..675b3a64 --- /dev/null +++ b/src/cec/cluster.cpp @@ -0,0 +1,346 @@ +#include "cluster.hpp" +#include +//#include "../../inst/include/cec/cluster.hpp" +// Added those because mingw is stupid. Don't remove! +#define M_E 2.71828182845904523536 +#define M_PI 3.14159265358979323846 + +namespace gmum { + +Cluster::Cluster(int count, const arma::rowvec &mean) : + m_count(count), m_mean(mean) { + m_n = m_mean.n_elem; + m_entropy = 0.0; +} + +Cluster::Cluster(unsigned int id, const std::vector &assignment, + const arma::mat &points) { + initialize_mean(id, assignment, points); + if (m_count == 0) + throw(NoPointsInCluster()); + + m_n = points.n_cols; + m_entropy = 0.0; +} + +arma::rowvec Cluster::initialize_mean(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + int dimention = points.n_cols; + m_count = 0; + m_mean = arma::rowvec(dimention, arma::fill::zeros); + for (unsigned int i = 0; i < points.n_rows; i++) { + if (assignment[i] == id) { + m_mean += points.row(i); + m_count += 1; + } + } + m_mean = m_mean / m_count; + + return m_mean; +} + +int Cluster::size() const { + return m_count; +} + +arma::rowvec Cluster::get_mean() { + return m_mean; +} + +double Cluster::entropy() const { + return m_entropy; +} + +void ClusterUseCovMat::initialize_cov_mat(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + int dimension = points.n_cols; + arma::rowvec m = m_mean; + arma::mat out(dimension, dimension, arma::fill::zeros); + for (unsigned int i = 0; i < points.n_rows; i++) + if (assignment[i] == id) { + arma::rowvec point = points.row(i); + arma::rowvec tmp = point - m; + out += (tmp.t() * tmp) / (m_count); + } + + m_cov_mat = out; + m_cov_mat_tmp = m_cov_mat; +} + +ClusterUseCovMat::ClusterUseCovMat(int count, const arma::rowvec &mean, + const arma::mat &cov_mat) : + Cluster(count, mean), m_cov_mat(cov_mat), m_cov_mat_tmp(cov_mat) { +} + +ClusterUseCovMat::ClusterUseCovMat(unsigned int id, + const std::vector &assignment, const arma::mat &points) : + Cluster(id, assignment, points) { + initialize_cov_mat(id, assignment, points); +} + +double ClusterUseCovMat::entropy_after_add_point(const arma::rowvec &point) { + arma::rowvec r = m_mean - point; + //arma::rowvec nmean = ((m_count) * m_mean + point) / (m_count + 1); + m_cov_mat_tmp = (1.0 * (m_count) / (1 + m_count)) + * (m_cov_mat + (r.t() * r) / (m_count + 1)); + return calculate_entropy(m_n, m_cov_mat_tmp); +} + +double ClusterUseCovMat::entropy_after_remove_point(const arma::rowvec &point) { + arma::rowvec r = m_mean - point; + //arma::rowvec nmean = ((m_count) * m_mean - point) / (m_count - 1); + m_cov_mat_tmp = (1.0 * (m_count) / (m_count - 1)) + * (m_cov_mat - (r.t() * 
r) / (m_count - 1)); + return calculate_entropy(m_n, m_cov_mat_tmp); +} + +void ClusterUseCovMat::add_point(const arma::rowvec &point) { + m_count = m_count + 1; + arma::rowvec r = m_mean - point; + m_mean = ((m_count - 1) * m_mean + point) / (m_count); + m_cov_mat = (1.0 * (m_count - 1) / m_count) + * (m_cov_mat + (r.t() * r) / m_count); + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + +void ClusterUseCovMat::remove_point(const arma::rowvec &point) { + m_count = m_count - 1; + arma::rowvec r = m_mean - point; + m_mean = ((m_count + 1) * m_mean - point) / m_count; + m_cov_mat = (1.0 * (m_count + 1) / m_count) + * (m_cov_mat - (r.t() * r) / m_count); + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + +arma::mat ClusterUseCovMat::get_cov_mat(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + return m_cov_mat; +} + +ClusterOnlyTrace::ClusterOnlyTrace(int count, const arma::rowvec & mean, + double cov_mat_trace) : + Cluster(count, mean), m_cov_mat_trace(cov_mat_trace) { +} + +ClusterOnlyTrace::ClusterOnlyTrace(unsigned int id, + const std::vector & assignment, const arma::mat & points) : + Cluster(id, assignment, points) { + initialize_mean(id, assignment, points); + compute_cov_mat_trace(id, assignment, points); +} + +void ClusterOnlyTrace::compute_cov_mat_trace(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + m_cov_mat_trace = 0.0; + for (unsigned int i = 0; i < points.n_rows; i++) + if (assignment[i] == id) { + arma::rowvec point = points.row(i); + arma::rowvec tmp = point - m_mean; + m_cov_mat_trace += dot(tmp, tmp); + } + m_cov_mat_trace /= m_count; +} + +void ClusterOnlyTrace::add_point(const arma::rowvec & point) { + int ncount = m_count + 1; + arma::rowvec nmean = (m_count * m_mean + point) / (ncount); + arma::rowvec r = nmean - point; + arma::rowvec mean_diff = m_mean - nmean; + double ntrace = ((m_cov_mat_trace + dot(mean_diff, mean_diff)) * m_count + + dot(r, r)) / ncount; + m_cov_mat_trace = ntrace; + m_count = ncount; + m_mean = nmean; + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +double ClusterOnlyTrace::entropy_after_remove_point(const arma::rowvec &point) { + int ncount = m_count - 1; + arma::rowvec nmean = (m_count * m_mean - point) / (ncount); + arma::rowvec mean_diff = m_mean - nmean; + arma::rowvec r = nmean - point; + double ntrace = ((m_cov_mat_trace + dot(mean_diff, mean_diff)) * m_count + - dot(r, r)) / ncount; + return calculate_entropy(ntrace, m_n); +} + +double ClusterOnlyTrace::entropy_after_add_point(const arma::rowvec & point) { + int ncount = m_count + 1; + arma::rowvec nmean = (m_count * m_mean + point) / (ncount); + arma::rowvec r = nmean - point; + arma::rowvec mean_diff = m_mean - nmean; + double ntrace = ((m_cov_mat_trace + dot(mean_diff, mean_diff)) * m_count + + dot(r, r)) / ncount; + return calculate_entropy(ntrace, m_n); +} + +void ClusterOnlyTrace::remove_point(const arma::rowvec &point) { + int ncount = m_count - 1; + arma::rowvec nmean = (m_count * m_mean - point) / (ncount); + arma::rowvec mean_diff = m_mean - nmean; + arma::rowvec r = nmean - point; + double ntrace = ((m_cov_mat_trace + dot(mean_diff, mean_diff)) * m_count + - dot(r, r)) / ncount; + m_cov_mat_trace = ntrace; + m_count = ncount; + m_mean = nmean; + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +double ClusterOnlyTrace::get_cov_mat_trace() { + return m_cov_mat_trace; +} + +arma::mat ClusterOnlyTrace::get_cov_mat(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + 
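+ // ClusterOnlyTrace stores only the covariance trace, so the full covariance matrix is recomputed on demand from the points currently assigned to this cluster.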
arma::mat out(m_n, m_n, arma::fill::zeros); + for (unsigned int i = 0; i < points.n_rows; i++) { + if (assignment[i] == id) { + arma::rowvec point = points.row(i); + arma::rowvec tmp = point - m_mean; + out += (tmp.t() * tmp) / (m_count); + } + } + return out; +} + +double ClusterStandard::calculate_entropy(int n, const arma::mat &cov_mat) { + return n * log(2 * M_PI * M_E) / 2 + log(arma::det(cov_mat)) / 2; +} + +ClusterStandard::ClusterStandard(int count, const arma::rowvec &_mean, + const arma::mat &cov_mat) : + ClusterUseCovMat(count, _mean, cov_mat) { + m_entropy = calculate_entropy(m_n, cov_mat); +} + +ClusterStandard::ClusterStandard(unsigned int id, + const std::vector &assignment, const arma::mat &points) : + ClusterUseCovMat(id, assignment, points) { + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + + arma::mat ClusterStandard::get_cov_mat(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + return m_cov_mat; + } + +ClusterFixedCovariance::ClusterFixedCovariance(const arma::mat & sigma, unsigned int id, + const std::vector &assignment, const arma::mat &points) : + ClusterUseCovMat(id, assignment, points) { + m_sigma_det = arma::det(sigma); + m_inv_sigma = arma::inv(sigma); + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + +ClusterFixedCovariance::ClusterFixedCovariance(const arma::mat& inv_sigma, double sigma_det, + int count, const arma::rowvec & mean, const arma::mat & cov_mat) : + ClusterUseCovMat(count, mean, cov_mat), m_inv_sigma(inv_sigma), m_sigma_det( + sigma_det) { + m_entropy = calculate_entropy(m_n, cov_mat); +} + +double ClusterFixedCovariance::calculate_entropy(int n, const arma::mat &cov_mat) { + return n * log(2 * M_PI) / 2 + arma::trace(m_inv_sigma * cov_mat) / 2 + + log(m_sigma_det) / 2; +} + + arma::mat ClusterFixedCovariance::get_cov_mat(unsigned int id, + const std::vector &assignment, const arma::mat &points) { + return arma::inv(m_inv_sigma); + } + +ClusterSphericalFixedR::ClusterSphericalFixedR(double r, unsigned int id, + const std::vector &assignment, const arma::mat &points) : + ClusterOnlyTrace(id, assignment, points), m_r(r) { + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +ClusterSphericalFixedR::ClusterSphericalFixedR(double r, int count, + const arma::rowvec & mean, double cov_mat_trace) : + ClusterOnlyTrace(count, mean, cov_mat_trace), m_r(r) { + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +double ClusterSphericalFixedR::calculate_entropy(double cov_mat_trace, int n) { + return n * log(2 * M_PI) / 2 + cov_mat_trace / (2 * m_r) + n * log(m_r) / 2; +} + + arma::mat ClusterSphericalFixedR::get_cov_mat(unsigned int id, + const std::vector &assignment, + const arma::mat &points){ + arma::mat cov = ClusterOnlyTrace::get_cov_mat(id, assignment, points); + cov = arma::eye(cov.n_cols, cov.n_cols) * arma::trace(cov) / cov.n_cols; + return cov * m_r; + } + +ClusterSpherical::ClusterSpherical(unsigned int id, + const std::vector &assignment, const arma::mat &points) : + ClusterOnlyTrace(id, assignment, points) { + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +ClusterSpherical::ClusterSpherical(int count, const arma::rowvec &mean, + double cov_mat_trace) : + ClusterOnlyTrace(count, mean, cov_mat_trace) { + m_entropy = calculate_entropy(m_cov_mat_trace, m_n); +} + +double ClusterSpherical::calculate_entropy(double cov_mat_trace, int n) { + return n * log(2 * M_PI * M_E / n) / 2 + n * log(cov_mat_trace) / 2; +} + + arma::mat ClusterSpherical::get_cov_mat(unsigned int id, + const std::vector 
&assignment, + const arma::mat &points){ + arma::mat cov = ClusterOnlyTrace::get_cov_mat(id, assignment, points); + cov = arma::eye(cov.n_cols, cov.n_cols) * arma::trace(cov) / cov.n_cols; + return cov; + } + +ClusterDiagonal::ClusterDiagonal(unsigned int id, + const std::vector &assignment, const arma::mat &points) : + ClusterUseCovMat(id, assignment, points) { + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + +ClusterDiagonal::ClusterDiagonal(int count, const arma::rowvec & mean, + const arma::mat & cov_mat) : + ClusterUseCovMat(count, mean, cov_mat) { + m_entropy = calculate_entropy(m_n, cov_mat); +} + +double ClusterDiagonal::calculate_entropy(int n, const arma::mat &cov_mat) { + return n * log(2 * M_PI * M_E) / 2 + + log(arma::det(arma::diagmat(cov_mat))) / 2; +} + + arma::mat ClusterDiagonal::get_cov_mat(unsigned int id, + const std::vector &assignment, + const arma::mat &points){ + m_cov_mat = arma::diagmat(m_cov_mat); + return m_cov_mat; + } + +ClusterFixedCovariance *ClusterFixedCovariance::clone() { + return new ClusterFixedCovariance(m_inv_sigma, m_sigma_det, m_count, m_mean, + m_cov_mat); +} + +ClusterSphericalFixedR *ClusterSphericalFixedR::clone() { + return new ClusterSphericalFixedR(m_r, m_count, m_mean, m_cov_mat_trace); +} + +ClusterSpherical* ClusterSpherical::clone() { + return new ClusterSpherical(m_count, m_mean, m_cov_mat_trace); +} + +ClusterStandard* ClusterStandard::clone() { + return new ClusterStandard(m_count, m_mean, m_cov_mat); +} + +ClusterDiagonal* ClusterDiagonal::clone() { + return new ClusterDiagonal(m_count, m_mean, m_cov_mat); +} + +} diff --git a/src/cec/cluster_custom_function.cpp b/src/cec/cluster_custom_function.cpp new file mode 100644 index 00000000..158f41c8 --- /dev/null +++ b/src/cec/cluster_custom_function.cpp @@ -0,0 +1,36 @@ +#include "cluster_custom_function.hpp" + +namespace gmum { + +#ifdef RCPP_INTERFACE + +ClusterCustomFunction::ClusterCustomFunction(int count, + const arma::rowvec& mean, const arma::mat& cov_mat, + boost::shared_ptr function) : +ClusterUseCovMat(count, mean, cov_mat), m_function(function) { + m_entropy = calculate_entropy(m_n, cov_mat); +} + +ClusterCustomFunction::ClusterCustomFunction(unsigned int id, + const std::vector &assignment, const arma::mat &points, + boost::shared_ptr function) : +ClusterUseCovMat(id, assignment, points), m_function(function) { + m_entropy = calculate_entropy(m_n, m_cov_mat); +} + +double ClusterCustomFunction::calculate_entropy(int n, + const arma::mat &cov_mat) { + + return Rcpp::as( + (*m_function)(Rcpp::Named("m", Rcpp::wrap(n)), + Rcpp::Named("sigma", Rcpp::wrap(cov_mat)))); +} + +ClusterCustomFunction* ClusterCustomFunction::clone() +{ + return new ClusterCustomFunction(m_count, m_mean, m_cov_mat, m_function); +} + +#endif + +} diff --git a/src/cec/hartigan.cpp b/src/cec/hartigan.cpp new file mode 100644 index 00000000..ed205a13 --- /dev/null +++ b/src/cec/hartigan.cpp @@ -0,0 +1,255 @@ +#include +#include +#include "hartigan.hpp" + + +namespace gmum { + +Hartigan::Hartigan(bool log_nclusters, bool log_energy, int max_iter) : + Algorithm(log_nclusters, log_energy, max_iter) { +} + +TotalResult Hartigan::loop(const arma::mat &points, + std::vector &assignment, double kill_threshold, + std::vector &clusters) { + TotalResult result; + SingleResult sr; + do { + sr = single_loop(points, assignment, kill_threshold, clusters); + result.append(sr, m_log_nclusters, m_log_energy); + if ((m_max_iter > 0) && (m_max_iter <= result.iterations)) { + // if max iter parameter is enabled and there 
are already max_iter iterations passed, break + break; + } + + } while (sr.switched > 0); + return result; +} + +double Hartigan::calc_energy(double cross_entropy, int points_in_cluster, + int npoints) { + double p = 1.0 * points_in_cluster / npoints; + return p * (cross_entropy - log(p)); +} + +double Hartigan::calc_energy_change(const Cluster& a, const Cluster &b, + int npoints) { + double energy_a = calc_energy(a.entropy(), a.size(), npoints); + double energy_b = calc_energy(b.entropy(), b.size(), npoints); + return energy_a - energy_b; +} + +SingleResult Hartigan::single_loop(const arma::mat &points, + std::vector &assignment, double kill_threshold, + std::vector &clusters_raw) { + + int switched = 0; //number of point which has been moved to another cluster + int dimension = points.n_cols; + unsigned int npoints = points.n_rows; + SingleResult sr; + + // LOG(m_logger, LogLevel::INFO_LEVEL, to_string(clusters.size())); + + for (unsigned int i = 0; i < npoints; i++) { + unsigned int source = assignment[i]; + arma::rowvec point = points.row(i); + double before_source_energy = calc_energy( + clusters_raw[source]->entropy(), clusters_raw[source]->size(), + npoints); + + double source_energy_change = calc_energy( + clusters_raw[source]->entropy_after_remove_point(point), + clusters_raw[source]->size() - 1, npoints) + - before_source_energy; + + double best_energy_change = 0; + int best_cluster = -1; + + for (unsigned int k = 0; k < clusters_raw.size(); k++) { + if (k != source) { + double energy_change; + try { + double before_target_energy = calc_energy( + clusters_raw[k]->entropy(), clusters_raw[k]->size(), + npoints); + + double target_energy_change = calc_energy( + clusters_raw[k]->entropy_after_add_point(point), + clusters_raw[k]->size() + 1, npoints) + - before_target_energy; + + energy_change = target_energy_change + + source_energy_change; + } catch (std::exception &e) { + LOG(m_logger, LogLevel::ERR_LEVEL, "Energy change calculation"); + LOG(m_logger, LogLevel::ERR_LEVEL, dimension); + throw(e); + //return SingleResult(switched, clusters.size(), 0); + } + + if (!std::isnormal(energy_change)) { + continue; // ignore this cluster, it will be removed later + } + + if (energy_change < best_energy_change) { //newEnergy < oldEnergy + best_cluster = k; + best_energy_change = energy_change; + } + } //for iterates clusters + } // for clusters + if (best_cluster != -1) { + switched++; + + clusters_raw[source]->remove_point(point); + clusters_raw[best_cluster]->add_point(point); + + //point moved from cluster source to k - update assignment + assignment[i] = best_cluster; + + try { + //if cluster has number of members lower than threshold, remove the cluster + //threshold is fraction of all points + if (clusters_raw[source]->size() < std::max(int(kill_threshold * npoints), dimension + 1)) + { + remove_cluster(source, points, assignment, clusters_raw); + } + } catch (std::exception &e) { + //LOG(LogLevel::ERR, e.what()); + //TODO: why not terminating here? 
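+ // The exception is logged and rethrown so the caller (ultimately CecModel::find_best_cec) decides how to abort: Rcpp::stop() under R, exit(1) otherwise.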
+ LOG(m_logger, LogLevel::ERR_LEVEL, "removeCluster"); + throw(e); + } + } + } //for iterates points + + double curr_cluster_energy; + bool end_cleaning; + do + { + if(clusters_raw.size() == 1) + { + break; + } + + end_cleaning = true; + for (unsigned int i = 0; i < clusters_raw.size(); ++i) { + curr_cluster_energy = calc_energy(clusters_raw[i]->entropy(), clusters_raw[i]->size(), npoints); + if(!std::isnormal(curr_cluster_energy)) + { + remove_cluster(i, points, assignment, clusters_raw); + end_cleaning = false; + break; + } + if(!end_cleaning) { + break; + } + } + } while(!end_cleaning); + + //LOG(m_logger, LogLevel::INFO_LEVEL, energy); + + double energy = 0; + for (unsigned int i = 0; i < clusters_raw.size(); ++i) { + curr_cluster_energy = calc_energy(clusters_raw[i]->entropy(), clusters_raw[i]->size(), npoints); + energy += curr_cluster_energy; + } + + sr.energy = energy; + sr.switched = switched; + sr.nclusters = clusters_raw.size(); + return sr; +} + +void Hartigan::remove_cluster(unsigned int source, const arma::mat &points, + std::vector<unsigned int> &assignment, + std::vector<Cluster*> &clusters) { + //delete cluster + std::vector<Cluster*>::iterator it = clusters.begin() + source; + if(it != clusters.end()) + { + delete *it; + clusters.erase(it); + } + + unsigned int npoints = points.n_rows; + + if(clusters.size() == 1) + { + for(unsigned int j = 0; j < npoints; ++j) + { + if(assignment[j] == source) + { + clusters[0]->add_point(points.row(j)); + } + assignment[j] = 0; + } + return; + } + + //assign points of erased cluster + for (unsigned int j = 0; j < npoints; j++) { + + //find point of deleted cluster + if (assignment[j] == source) { + + arma::rowvec point_to_assign = points.row(j); + int min_energy_change_element_index = -1; + double min_energy_change = std::numeric_limits<double>::max(); + + //find the best cluster to assign the point to + for (unsigned int k = 0; k < clusters.size(); k++) { + + double before_energy = calc_energy(clusters[k]->entropy(), + clusters[k]->size(), npoints); + + clusters[k]->add_point(point_to_assign); + + //std::cerr << clusters[k]->entropy() << std::endl; + double energy_change = calc_energy(clusters[k]->entropy(), + clusters[k]->size(), npoints) - before_energy; + + clusters[k]->remove_point(point_to_assign); + + if(!std::isnormal(energy_change)) { continue; } // ignore degenerate clusters + + if (energy_change < min_energy_change) { + min_energy_change = energy_change; + min_energy_change_element_index = k; + } + + } + ASSERT(min_energy_change_element_index > -1); + if(min_energy_change_element_index == -1) + { + // all clusters are degenerate, find the first cluster that is not the source cluster + for(unsigned int k = 0; k < clusters.size(); ++k) + { + if(k != source) + { + min_energy_change_element_index = k; + break; + } + } + } + + //we are here adding and then removing + clusters[min_energy_change_element_index]->add_point( + point_to_assign); + assignment[j] = min_energy_change_element_index; + + } else if (assignment[j] > source) { + assignment[j]--; + } + //number of clusters is expected to be small in comparison to number + //of data points. When you remove a cluster you decrease assignment of all + //points belonging to clusters with higher position in vector, in order + //to keep assignment adequate.
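+ //Example: after erasing cluster 2, labels {0, 1, 2, 3, 4} become {0, 1, reassigned, 2, 3}; every label greater than the removed index shifts down by one.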
+ } +} +double Hartigan::entropy(boost::shared_ptr ptr_to_cluster, + int npoints) { + double p = (1.0 * ptr_to_cluster->size()) / npoints; + return p * ptr_to_cluster->entropy() + p * log(-p); +} +} diff --git a/src/cec/kmeanspp_assignment.cpp b/src/cec/kmeanspp_assignment.cpp new file mode 100644 index 00000000..f485874c --- /dev/null +++ b/src/cec/kmeanspp_assignment.cpp @@ -0,0 +1,90 @@ +#include "kmeanspp_assignment.hpp" + +namespace gmum { + +void KmeansppAssignment::operator()(std::vector &assignment) { + + std::vector centers; + centers.reserve(m_nclusters); + + unsigned int npoints = assignment.size(); + boost::random::mt19937 gen(m_seed); + + //select points for centers + std::list selected; + unsigned int nsections = m_nclusters; + unsigned int section = npoints / nsections; + if (section <= 1) + for (unsigned int i = 0; i < npoints; ++i) + selected.push_back(i); + else { + boost::random::uniform_int_distribution<> uniform(0, section - 1); + unsigned int k = 0; + //nrOfPoints > section + for (; k < npoints - section; k += section) { + unsigned int target = k + uniform(gen); + selected.push_back(target); + } + k += uniform(gen); + if (k >= npoints) + k = npoints - 1; + selected.push_back(k); + } + + int centers_left = m_nclusters; + //first center + centers.push_back(selected.front().point_number); + selected.pop_front(); + --centers_left; + + //choose other centers + float bernoulli_p = 0.8; + boost::random::bernoulli_distribution<> bernoulli(bernoulli_p); + + for (; centers_left > 0; --centers_left) { + calculate_distance(centers, selected, m_points); + choose(bernoulli, gen, selected); + centers.push_back(selected.front().point_number); + selected.pop_front(); + } + + assign_points(assignment, centers, m_points); +} + +void calculate_distance(const std::vector ¢ers, + std::list &selected, const arma::mat &points) { + + for (std::list::iterator it = selected.begin(); it != selected.end(); + ++it) { + arma::rowvec point = points.row(it->point_number); + float distance = std::numeric_limits::max(); + + for (unsigned int i = 0; i < centers.size(); ++i) { + arma::rowvec vec = points.row(centers[i]) - point; + float temp_dist = arma::as_scalar(vec * vec.t()); + if (distance > temp_dist) + distance = temp_dist; + } + + it->distance = distance; + } + + selected.sort(); +} + +std::list::iterator choose( + boost::random::bernoulli_distribution<> &bernoulli, + boost::random::mt19937 &gen, std::list &selected) { + + std::list::iterator it = selected.begin(); + for (; !bernoulli(gen) && it != selected.end(); ++it) + ; + + if (it == selected.end()) { + it = selected.end(); + --it; + } + + return it; +} +} diff --git a/src/cec/random_assignment.cpp b/src/cec/random_assignment.cpp new file mode 100644 index 00000000..103206f3 --- /dev/null +++ b/src/cec/random_assignment.cpp @@ -0,0 +1,37 @@ +#include "random_assignment.hpp" + +#ifdef RCPP_INTERFACE +#include +#endif + +namespace gmum { + +void RandomAssignment::operator()(std::vector &assignment) { + + std::vector centers; + centers.reserve(m_nclusters); + + unsigned int npoints = assignment.size(); + boost::random::mt19937 gen(m_seed); + boost::random::uniform_int_distribution<> dist(0, npoints - 1); + + if (m_nclusters > npoints) { + GMUM_ERROR("Number of clusters cannot be greater than number of points"); + } + + for (unsigned int i = 0; i < m_nclusters; i++) { + while (true) { + unsigned int center = dist(gen); + if (std::find(centers.begin(), centers.end(), center) + == centers.end()) { + /* centers does not contain dist(gen) */ + 
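+ // Rejection sampling: keep redrawing until an unused point index comes up, so all initial centers are distinct data points.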
centers.push_back(center); + break; + } + } + } + + assign_points(assignment, centers, m_points); +} + +} diff --git a/src/gng/gng_algorithm.cpp b/src/gng/gng_algorithm.cpp new file mode 100644 index 00000000..b39632c0 --- /dev/null +++ b/src/gng/gng_algorithm.cpp @@ -0,0 +1,1022 @@ +/* + * File: GNGAlgorithm.cpp + * Author: staszek "kudkudak" jastrzebski gmail.com> + * + * Created on 11 sierpieĹ„ 2012, 10:02 + */ + +//TODO: refactor getExample +#include +#include +#include +#include +#include + +using namespace boost; +using namespace gmum; +using namespace std; + +namespace gmum { + +GNGNode ** GNGAlgorithm::LargestErrorNodesLazy() { + GNGNode ** largest = new GNGNode*[2]; + GNGNode * gng_node; + + FOREACH(int it, errorHeap.getLazyList()) + { + gng_node = &m_g[it]; + errorHeap.insert(gng_node->nr, gng_node->error); + } + + errorHeap.getLazyList().clear(); + + ErrorNode max; + //Extract max until you get correct one (that is not lazy) + do { + max = errorHeap.extractMax(); + + DBG_PTR(m_logger, 4, "GNGAlgorithm::LargestErrorLazy::found max " + to_string(max.i)); + + GNGNode * gng_node = &m_g[max.i]; + if (gng_node->error_cycle != c) { + fixErrorNew(gng_node); + errorHeap.update(gng_node->nr, gng_node->error); + } else { + largest[0] = gng_node; + int j = 0; + double error = 0.0; + DBG_PTR(m_logger, 4, "GNGAlgorithm::LargestErrorLazy::found max " + to_string(max.i)); + + BOOST_FOREACH(GNGEdge * edg, *largest[0]) + { + ++j; + fixErrorNew(&m_g[(edg)->nr]); + + if (j == 1) { + largest[1] = &m_g[(edg)->nr]; + error = largest[1]->error; + ; + continue; + } + + double new_error = m_g[(edg)->nr].error; + + if (error < new_error) { + error = new_error; + largest[1] = &m_g[(edg)->nr]; + } + } + break; + } + + } while (1); DBG_PTR(m_logger, 3, "GNGAlgorithm::LargestErrorLazy::returning"); + return largest; + +} + +GNGGraph* GNGGraphAccessHack::pool = 0; + +GNGAlgorithm::GNGAlgorithm(GNGGraph * g, GNGDataset* db, + double * boundingbox_origin, double * boundingbox_axis, double l, + int max_nodes, int max_age, double alpha, double betha, double lambda, + double eps_w, double eps_n, int dim, bool uniformgrid_optimization, + bool lazyheap_optimization, unsigned int utility_option, + double utility_k, int max_iter, int seed, boost::shared_ptr logger) : + m_g(*g), g_db(db), c(0), s(0), m_max_nodes(max_nodes), m_max_age( + max_age), m_alpha(alpha), m_betha(betha), m_lambda(lambda), m_eps_w( + eps_w), m_eps_n(eps_n), m_density_threshold(0.1), m_grow_rate( + 1.5), errorHeap(), dim(dim), m_toggle_uniformgrid( + uniformgrid_optimization), m_toggle_lazyheap( + lazyheap_optimization), m_utility_option( + utility_option), m_mean_error(1000), m_utility_k(utility_k), + max_iter(max_iter), m_logger( + logger), m_iteration(0), + m_gng_status(GNG_TERMINATED), + m_gng_status_request(GNG_TERMINATED), mt_rand(seed) { + + DBG_PTR(m_logger, 1, "GNGAlgorithm:: Constructing object"); + DBG_PTR(m_logger, 10, + "GNGAlgorithm::Constructed object with utility " + + to_string(utility_option) + " " + to_string(utility_k)); + + if (m_toggle_uniformgrid) { + ug = new UniformGrid, Node, int>(boundingbox_origin, + boundingbox_axis, l, dim, m_grow_rate, m_density_threshold, 0.4, + m_logger); + + GNGGraphAccessHack::pool = &m_g; + + ug->setDistFunction(GNGGraphAccessHack::dist); + + // Restore uniform grid state + int maximum_index = m_g.get_maximum_index(); + REP(i, maximum_index + 1) + if (m_g.existsNode(i)) + ug->insert(m_g[i].position, m_g[i].nr); + + } + + if (m_toggle_lazyheap) { + int maximum_index = m_g.get_maximum_index(); + 
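+ // Re-register every node already present in the graph with the lazy error heap, mirroring the uniform grid restore above.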
REP(i, maximum_index + 1) + if (m_g.existsNode(i)) + setErrorNew(&m_g[i], m_g[i].error); + } + + m_betha_powers_size = m_lambda * 10; + m_betha_powers = new double[m_betha_powers_size]; + + REP(i, m_betha_powers_size) + m_betha_powers[i] = std::pow(m_betha, (double) (i)); + + m_betha_powers_to_n_length = m_max_nodes * 2; + m_betha_powers_to_n = new double[m_max_nodes * 2]; + + REP(i, m_max_nodes * 2) + m_betha_powers_to_n[i] = std::pow(m_betha, m_lambda * (double) (i)); + DBG_PTR(m_logger, 1, "GNGAlgorithm:: Constructed object"); +} + +void GNGAlgorithm::randomInit() { + + DBG_PTR(m_logger, 3, "randomInit::Drawing examples"); + + int ex1 = g_db->drawExample(); + int ex2 = g_db->drawExample(); + + DBG_PTR(m_logger, 3, "randomInit::Drawn 2"); + int index = 0; + while (ex2 == ex1 && index < 100) { + ++index; + ex2 = g_db->drawExample(); + } + DBG_PTR(m_logger, 3, "randomInit::database_size = " + to_string(g_db->size())); + DBG_PTR(m_logger, 3, + "randomInit::drawn " + to_string(ex1) + " " + to_string(ex2)); + + const double * ex1_ptr = g_db->getPosition(ex1); + const double * ex1_extra_ptr = g_db->getExtraData(ex1); + const double * ex2_ptr = g_db->getPosition(ex2); + const double * ex2_extra_ptr = g_db->getExtraData(ex2); + + m_g.newNode(ex1_ptr); + m_g.newNode(ex2_ptr); + + if (ex1_extra_ptr) + m_g[0].extra_data = ex1_extra_ptr[0]; + if (ex2_extra_ptr) + m_g[1].extra_data = ex2_extra_ptr[0]; + + DBG_PTR(m_logger, 3, + "randomInit::created nodes graph size=" + + to_string(m_g.get_number_nodes())); + +#ifdef GMUM_DEBUG_2 + ASSERT(m_g.get_number_nodes()==2); +#endif + + if (m_toggle_uniformgrid) { + ug->insert(m_g[0].position, 0); + ug->insert(m_g[1].position, 1); + } + + if (m_toggle_lazyheap) { + setErrorNew(&m_g[0], 0.0); + setErrorNew(&m_g[1], 0.0); + } + if (this->m_utility_option == BasicUtility) { + setUtility(0, 0.001); + setUtility(1, 0.001); + } +} + +void GNGAlgorithm::addNewNode() { + using namespace std; + + if (m_max_nodes <= m_g.get_number_nodes()) { + DBG_PTR(m_logger, 4, + "GNGAlgorith::AddNewNode:: achieved maximum number of nodes"); + return; + } + + LOG_PTR(m_logger, 10, "GNGAlgorith::AddNewNode "+to_string(m_g.get_number_nodes())); + + DBG_PTR(m_logger, 4, "GNGAlgorith::AddNewNode::start search"); + + if (m_toggle_lazyheap) + DBG_PTR(m_logger, 4, + "GNGAlgorithm::AddNewNode:: " + to_string(m_toggle_lazyheap) + + " : )= toggle_lazyheap"); + + GNGNode ** error_nodes_new; + + if (m_toggle_lazyheap) + error_nodes_new = LargestErrorNodesLazy(); + else + error_nodes_new = LargestErrorNodes(); + + DBG_PTR(m_logger, 4, "GNGAlgorith::AddNewNode::search completed"); + + if (!error_nodes_new[0] || !error_nodes_new[1]) + return; + + DBG_PTR(m_logger, 4, "GNGAlgorith::AddNewNode::search completed and successful"); + + + + double * position = new double[this->dim]; //param + + //TODO: < GNG_DIM? 
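+ // Standard GNG insertion rule: the new node is placed at the midpoint between the two highest-error nodes found above.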
+ for (int i = 0; i < this->dim; ++i) //param + position[i] = (error_nodes_new[0]->position[i] + + error_nodes_new[1]->position[i]) / 2; + + //In case pool has been reallocated + int er_nr1 = error_nodes_new[0]->nr, er_nr2 = error_nodes_new[1]->nr; + int new_node_index = m_g.newNode(&position[0]); + error_nodes_new[0] = &m_g[er_nr1]; + error_nodes_new[1] = &m_g[er_nr2]; + + //Vote for extra data + m_g[new_node_index].extra_data = (error_nodes_new[0]->extra_data + + error_nodes_new[1]->extra_data) / 2.0; + + if (m_toggle_uniformgrid) + ug->insert(m_g[new_node_index].position, new_node_index); + + DBG_PTR(m_logger, 4, "GNGAlgorith::AddNewNode::added " + to_string(m_g[new_node_index])); + + m_g.removeUDEdge(error_nodes_new[0]->nr, error_nodes_new[1]->nr); + + DBG_PTR(m_logger, 3, "GNGAlgorith::AddNewNode::removed edge beetwen " + + to_string(error_nodes_new[0]->nr) + " and" + to_string(error_nodes_new[1]->nr)); + DBG_PTR(m_logger, 2, + "GNGAlgorithm::AddNewNode::largest error node after removing edge : " + + to_string(*error_nodes_new[0])); + + m_g.addUDEdge(error_nodes_new[0]->nr, new_node_index); + + m_g.addUDEdge(new_node_index, error_nodes_new[1]->nr); + + DBG_PTR(m_logger, 3, "GNGAlgorith::AddNewNode::add edge beetwen " + + to_string(error_nodes_new[0]->nr) + " and" + to_string(new_node_index)); + + if (!m_toggle_lazyheap) { + decreaseError(error_nodes_new[0]); + decreaseError(error_nodes_new[1]); + setError(&m_g[new_node_index], + (error_nodes_new[0]->error + error_nodes_new[1]->error) / 2); + } else { + decreaseErrorNew(error_nodes_new[0]); + decreaseErrorNew(error_nodes_new[1]); + setErrorNew(&m_g[new_node_index], + (error_nodes_new[0]->error + error_nodes_new[1]->error) / 2); + } + + if (this->m_utility_option == BasicUtility) + this->setUtility(new_node_index, + 0.5 + * (getUtility(error_nodes_new[0]->nr) + + getUtility(error_nodes_new[1]->nr))); + + delete[] error_nodes_new; + DBG_PTR(m_logger, 3, "GNGAlgorith::AddNewNode::delete done"); +} + + +int GNGAlgorithm::predict(const std::vector & ex) { + + if (m_g.get_number_nodes() == 0) + return -1; //No node + + if (ex.size() != g_db->getGNGDim()) + throw BasicException("Wrong example dimensionality"); + + return _getNearestNeurons(&ex[0]).first; +} + +std::pair GNGAlgorithm::adapt(const double * ex, + const double * extra) { + DBG_PTR(m_logger, 4, "GNGAlgorith::Adapt::commence search"); + + std::pair nearest = _getNearestNeurons(ex); + GNGNode * nearest_0 = &m_g[nearest.first], * nearest_1 = &m_g[nearest.second]; + + + DBG_PTR(m_logger, 4, "GNGAlgorith::Adapt::found nearest nodes to the drawn example " + to_string(*nearest_0) + " " + to_string(*nearest_1)); + + double error = m_g.get_dist(nearest_0->position, ex); + + if (this->m_utility_option == BasicUtility) { + + DBG_PTR(m_logger, 4, "GNGAlgorithm::Adapt::setting utility"); + + double error_2 = m_g.get_dist(nearest_1->position, ex); + + this->setUtility(nearest_0->nr, + this->getUtility(nearest_0->nr) + error_2 - error); + } + + DBG_PTR(m_logger, 3, "GNGAlgorith::Adapt::increasing error"); + + if (!m_toggle_lazyheap) + increaseError(nearest_0, error); + else + increaseErrorNew(nearest_0, error); + + DBG_PTR(m_logger, 3, "GNGAlgorith::Adapt::accounted for the error"); + + if (m_toggle_uniformgrid) + ug->remove(nearest_0->position); + for (int i = 0; i < this->dim; ++i) + nearest_0->position[i] += m_eps_w * (ex[i] - nearest_0->position[i]); + + //Adapt to extra dimensionality if present (TODO: refactor) + if (extra) + nearest_0->extra_data = (nearest_0->extra_data + extra[0]) / 
2.0; + + if (m_toggle_uniformgrid) + ug->insert(nearest_0->position, nearest_0->nr); + + if (nearest_0->edgesCount) { + FOREACH(GNGEdge * edg, *nearest_0) + { + if (m_toggle_uniformgrid) + ug->remove(m_g[(edg)->nr].position); + + for (int i = 0; i < this->dim; ++i) { //param accounting + m_g[(edg)->nr].position[i] += m_eps_n + * (ex[i] - m_g[(edg)->nr].position[i]); + } + + //Adapt to extra dimensionality if present (TODO: refactor) + if (extra) { + m_g[(edg)->nr].extra_data = (0.9 * m_g[(edg)->nr].extra_data + + extra[0] * 0.1); + } + + if (m_toggle_uniformgrid) + ug->insert(m_g[(edg)->nr].position, (edg)->nr); + } + } + + DBG_PTR(m_logger, 4, + "GNGAlgorith::Adapt::position of the winner and neighbour mutated"); + + if (!m_g.isEdge(nearest_0->nr, nearest_1->nr)) { + m_g.addUDEdge(nearest_0->nr, nearest_1->nr); + DBG_PTR(m_logger, 4, + "GNGAlgorith::Adapt::added edge beetwen " + + to_string(nearest_0->nr) + " and " + + to_string(nearest_1->nr)); + } + + bool BYPASS = false; + + DBG_PTR(m_logger, 4, "GNGAlgorith::Adapt::commence scan of edges"); + + //TODO: assuming here GNGNode not any arbitrary node :/ + GNGNode::EdgeIterator edg = nearest_0->begin(); + while (edg != nearest_0->end()) { + (*edg)->age++; + (((*edg)->rev))->age++; + + if ((*edg)->nr == nearest_1->nr) { + (*edg)->age = 0; + (((*edg)->rev))->age = 0; + } + + if ((*edg)->age > m_max_age) { + int nr = (*edg)->nr; + + //Note that this is O(E), but average number of edges is very small, so it is OK + edg = m_g.removeUDEdge(nearest_0->nr, nr); + + if (m_g[nr].edgesCount == 0 && this->m_utility_option == None) { + + DBG_PTR(m_logger, 8, "GNGAlgorith:: remove node because no edges"); + +#ifdef DEBUG_GMUM + FOREACH(GNGEdge* edg2, m_g[nr]) + { + CERR("WARNING: GNGAlgorithm:: edges count of neighbours of erased node, shouldn't happen! 
"); + } +#endif + + if (m_toggle_uniformgrid) + ug->remove(m_g[nr].position); + + DBG_PTR(m_logger, 8, + "GNGAlgorithm::Adapt() Erasing node " + + to_string(nr)); + DBG_PTR(m_logger, 8, + "GNGAlgorithm::Adapt() First coordinate " + + to_string(m_g[nr].position[0])); + + m_g.deleteNode(nr); + } + if (m_g[nearest_0->nr].edgesCount == 0 + && this->m_utility_option == None) { + + LOG_PTR(m_logger, 1, + "GNGAlgorithm::Adapt() remove node because no edges, shouldn't happen"); //Shouldn't happen + + if (m_toggle_uniformgrid) + ug->remove(m_g[nearest_0->nr].position); + m_g.deleteNode(nearest_0->nr); + break; + } + if (edg != nearest_0->end()) + --edg; + else + break; + DBG_PTR(m_logger, 3, "GNGAlgorith::Adapt::Removal completed"); + } + ++edg; + } + + //erase nodes + if (this->m_utility_option == BasicUtility) + this->utilityCriterionCheck(); + + return std::pair(error, nearest.first); +} + +double GNGAlgorithm::calculateAccumulatedError() { + + int maximum_index = m_g.get_maximum_index(); + m_accumulated_error = 0.0; + + if (this->m_toggle_lazyheap) { + + m_g.lock(); + int maximum_index = m_g.get_maximum_index(); + m_accumulated_error = 0.0; + + REP(i, maximum_index + 1) + if (m_g.existsNode(i)) + m_accumulated_error += m_g[i].error; + + m_g.unlock(); + return m_accumulated_error; + } else { + m_g.lock(); + m_accumulated_error = 0.0; + REP(i, maximum_index + 1) + if (m_g.existsNode(i)) + m_accumulated_error += m_g[i].error; + + m_g.unlock(); + return m_accumulated_error; + } +} + + + +void GNGAlgorithm::resizeUniformGrid() { + + DBG_PTR(m_logger, 6, "GNGAlgorithm::Resize Uniform Grid"); + DBG_PTR(m_logger, 6, + "GNGAlgorithm::Resize Uniform Grid old_l=" + + to_string(ug->getCellLength())); + DBG_PTR(m_logger, 6, + "GNGAlgorithm::Resize Uniform Grid new_l=" + + to_string(ug->getCellLength() / m_grow_rate)); + + ug->new_l(ug->getCellLength() / m_grow_rate); + + int maximum_index = m_g.get_maximum_index(); + + REP(i, maximum_index + 1) + if (m_g.existsNode(i)) + ug->insert(m_g[i].position, m_g[i].nr); + +} + +GNGNode ** GNGAlgorithm::LargestErrorNodes() { + DBG_PTR(m_logger, 2, "LargestErrorNodes::started procedure"); + + GNGNode ** largest = new GNGNode*[2]; + + largest[0] = 0; + largest[1] = 0; + double error = -1.0; + + REP(i, m_g.get_maximum_index() + 1) + if (m_g.existsNode(i)) + error = std::max(error, m_g[i].error); + + REP(i, m_g.get_maximum_index() + 1) + if (m_g.existsNode(i)) + if (m_g[i].error == error) + largest[0] = &m_g[i]; + + if (largest[0]->edgesCount == 0) { //{largest[0]->error=0; return largest;} //error? 
+ m_g.deleteNode(largest[0]->nr); + return largest; + } + + int j = 0; + + FOREACH(GNGEdge* edg, *largest[0]) + { + ++j; + + if (j == 1) { + largest[1] = &m_g[(edg)->nr]; + error = largest[1]->error; + continue; + } + + double new_error = m_g[(edg)->nr].error; + + if (error < new_error) { + error = new_error; + largest[1] = &m_g[(edg)->nr]; + } + } + + return largest; +} + +void GNGAlgorithm::updateClustering() { + gmum::scoped_lock db_lock(*g_db); + for(unsigned int i=0;isize();++i){ + set_clustering(i, _getNearestNeurons(g_db->getPosition(i)).first); + } +} + +void GNGAlgorithm::runAlgorithm() { //1 thread needed to do it (the one that computes) + m_gng_status = m_gng_status_request = GNG_RUNNING; + //Initialize global counters + s = 0; + c = 0; // cycle variable for lazyheap optimization + + while (g_db->size() < 2) { + this->status_change_mutex.lock(); + while (m_gng_status_request == GNG_PAUSED) { + if (m_gng_status_request == GNG_TERMINATED){ + m_gng_status = GNG_TERMINATED; + status_change_mutex.unlock(); + return; + } + this->status_change_condition.wait(this->status_change_mutex); + } + this->status_change_mutex.unlock(); + } + + if (m_g.get_number_nodes() == 0) { + gmum::scoped_lock db_lock(*g_db); + gmum::scoped_lock graph_lock(m_g); + randomInit(); + } else if (m_g.get_number_nodes() == 1) { + throw BasicException("Incorrect passed graph to GNGAlgorithm"); + } + + //We have to calculate error so we will collect error from adapt + //and when count is > dataset size we will set m_mean_error + double accumulated_error = 0.0; + double time_elapsed =0., time_elapsed_last_error=0.; + int accumulated_error_count = 0, accumulated_error_count_last = 0; + + LOG_PTR(m_logger, 3, "GNGAlgorithm::init successful, starting the loop"); + + for(int iteration=0; iterationstatus_change_mutex.lock(); + while (this->m_gng_status_request == GNG_PAUSED) { + m_gng_status = m_gng_status_request; + this->status_change_condition.wait(this->status_change_mutex); + } + if (this->m_gng_status_request == GNG_TERMINATED){ + LOG_PTR(m_logger, 5, "GNGAlgorithm::terminate"); + this->status_change_mutex.unlock(); + break; + } + this->status_change_mutex.unlock(); + m_gng_status = GNG_RUNNING; + + double dt =0.; + boost::posix_time::ptime start = boost::posix_time::microsec_clock::local_time(); + + for (s = 0; s < m_lambda; ++s) { //global counter!! + + const double * position, *vertex_data; + unsigned int ex = 0; + { + //Fined grained locks are necessary to prevent deadlocks + gmum::scoped_lock db_lock(*g_db); + ex = g_db->drawExample(); + position = g_db->getPosition(ex); + vertex_data = g_db->getExtraData(ex); + } + + gmum::scoped_lock graph_lock(m_g); + std::pair adapt_result = adapt(position, vertex_data); + + ASSERT(adapt_result.second >= 0); + + set_clustering(ex, adapt_result.second); + accumulated_error += adapt_result.first; + accumulated_error_count += 1; + } + +#ifdef GMUM_DEBUG + for (int i = 0; i <= m_g.get_maximum_index(); ++i) { //another idea for storing list of actual nodes? 
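+ // Debug-only invariant check: when the utility rule is disabled, no surviving node should be left without edges.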
+ if (m_g.existsNode(i) && m_g[i].edgesCount == 0 && m_utility_option == None) { + CERR("Error at " + to_string(i))); + } + } +#endif + + dt = ((boost::posix_time::microsec_clock::local_time() - start).total_milliseconds()+ 1.)/1000.0 ; + time_elapsed += dt; + time_elapsed_last_error += dt; + + //Calculate mini-batch error + if ((time_elapsed_last_error > 0.1 && accumulated_error_count > 5 * m_g.get_number_nodes()) || + accumulated_error_count > 15 * m_g.get_number_nodes()) { + gmum::scoped_lock stat_lock(m_statistics_mutex); + + m_mean_error.push_back(make_pair(time_elapsed, + accumulated_error/(double)accumulated_error_count + )); + + accumulated_error_count_last = accumulated_error_count; + time_elapsed_last_error = 0.0; + accumulated_error = 0.0; + accumulated_error_count = 0; + } + + { + gmum::scoped_lock graph_lock(m_g); + addNewNode(); + + if (m_toggle_uniformgrid && ug->check_grow()) { + DBG_PTR(m_logger, 10, "GNGAlgorithm:: resizing uniform grid"); + resizeUniformGrid(); + } + + ++c; //epoch + if (!m_toggle_lazyheap) + decreaseAllErrors(); + if (this->m_utility_option == BasicUtility) + decreaseAllUtility(); + } + ++m_iteration; + + DBG_PTR(m_logger, 9, "GNGAlgorithm::iteration "+to_string(m_iteration)); + } + m_gng_status = GNG_TERMINATED; + DBG_PTR(m_logger, 30, "GNGAlgorithm::Terminated server"); +} + + + + + + + + + + +/** Start algorithm loop */ +void GNGAlgorithm::run(bool synchronized) { + //TODO: refactor run to resume? + if(m_gng_status == GNG_TERMINATED){ + return; + } + + if(m_gng_status != GNG_RUNNING){ + m_gng_status_request = GNG_RUNNING; + this->status_change_condition.notify_all(); + } + if(this->g_db->size() > 2 && synchronized){ + //Algorithm should start. Run is synchronized. + //Terminated is also accepted state + while(m_gng_status == GNG_PAUSED){ + gmum::sleep(10); + } + } +} + +bool GNGAlgorithm::isRunning(){ + return this->m_gng_status == GNG_RUNNING; +} + +/** Pause algorithm loop */ +void GNGAlgorithm::pause(bool synchronized) { + if(this->m_gng_status != GNG_PAUSED){ + this->m_gng_status_request = GNG_PAUSED; + this->status_change_condition.notify_all(); + } + if(this->g_db->size() > 2 && synchronized){ + //Terminated is also accepted state + while(m_gng_status == GNG_RUNNING){ + this->m_gng_status_request = GNG_PAUSED; + gmum::sleep(10); + } + } +} + +/** Terminate the algorithm */ +void GNGAlgorithm::terminate(bool synchronized) { + if(this->m_gng_status != GNG_TERMINATED){ + this->m_gng_status_request = GNG_TERMINATED; + this->status_change_condition.notify_all(); + } + if(synchronized){ + while(m_gng_status == GNG_RUNNING){ + this->m_gng_status_request = GNG_TERMINATED; + gmum::sleep(10); + } + } +} + +void GNGAlgorithm::setMaxNodes(int value) { + m_max_nodes = value; +} + +int GNGAlgorithm::getIteration() const{ + return m_iteration; +} + +unsigned GNGAlgorithm::getErrorIndex() const{ + return m_mean_error.size(); +} + +double GNGAlgorithm::getMeanError() { + + gmum::scoped_lock alg_lock(m_statistics_mutex); + DBG_PTR(m_logger, 3, gmum::to_string(m_mean_error.size())); + if(m_mean_error.size() == 0){ + return -1.0; + }else{ + + return m_mean_error[m_mean_error.size()-1].second; + } +} + +vector > GNGAlgorithm::getMeanErrorStatistics() { + gmum::scoped_lock alg_lock(m_statistics_mutex); + if(m_mean_error.size() == 0){ + return vector >(1, make_pair(0., std::numeric_limits::max())); + }else{ + return vector >(m_mean_error.begin(), m_mean_error.end()); + } +} + +//Retrieve clustering result. 
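+// The pause/run pair below guarantees a consistent snapshot of clustering_result while the training thread is active.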
+//@note pauses the algorithm while the result snapshot is taken
+const vector<int> & GNGAlgorithm::get_clustering(){
+	bool was_running = false;
+	if(isRunning()){
+		was_running = true;
+		pause();
+	}
+	vector<int> & result = clustering_result;
+	if(was_running)
+		run();
+
+	return result;
+}
+
+GNGAlgorithm::~GNGAlgorithm() {
+	delete[] m_betha_powers_to_n;
+	delete[] m_betha_powers;
+}
+
+std::pair<int, int> GNGAlgorithm::_getNearestNeurons(const double *ex){
+	if (m_toggle_uniformgrid) {
+		DBG_PTR(m_logger, 1, "GNGAlgorithm::Adapt::Graph size " + to_string(m_g.get_number_nodes()));
+		std::vector<int> nearest_index = ug->findNearest(ex, 2); //TwoNearestNodes(ex->position);
+		DBG_PTR(m_logger, 1, "GNGAlgorithm::Adapt::Found nearest");
+
+		#ifdef GMUM_DEBUG_2
+		if (nearest_index[0] == nearest_index[1]) {
+			throw BasicException("Found same nearest_indexes"); //something went wrong (-1 == -1 would also pass this check)
+		}
+		#endif
+
+		#ifdef GMUM_DEBUG_2
+		ASSERT(m_g[nearest_index[1]].position > m_g.get_dist(m_g[nearest_index[0]].position, ex));
+		#endif
+
+		return std::pair<int, int>(nearest_index[0], nearest_index[1]);
+	} else {
+		DBG_PTR(m_logger, 1, "GNGAlgorithm::just called TwoNearestNodes");
+
+		int start_index = 0;
+		while (!m_g.existsNode(start_index))
+			++start_index;
+
+		double dist0 = m_g.get_dist(ex, m_g[start_index].position);
+		int best_0 = start_index, best_1 = -1;
+		for (int i = start_index + 1; i <= m_g.get_maximum_index(); ++i) {
+			if (m_g.existsNode(i)) {
+				double new_dist = m_g.get_dist(ex, m_g[i].position);
+				if (dist0 > new_dist) {
+					dist0 = new_dist;
+					best_0 = i;
+				}
+			}
+		}
+
+		DBG_PTR(m_logger, 1, "finding next\n");
+
+		start_index = 0;
+		while (!m_g.existsNode(start_index) || start_index == best_0)
+			++start_index;
+		double dist1 = m_g.get_dist(ex, m_g[start_index].position);
+		best_1 = start_index;
+
+		for (int i = start_index + 1; i <= m_g.get_maximum_index(); ++i) { //another idea for storing list of actual nodes?
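+			//Second pass: find the runner-up among live nodes, skipping the winner best_0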
+ if (m_g.existsNode(i) && i != best_0) { + double new_dist = m_g.get_dist(ex, m_g[i].position); + if (dist1 > new_dist) { + dist1 = new_dist; + best_1 = i; + } + } + } + + + #ifdef GMUM_DEBUG_2 + ASSERT(dist1 > dist0); + #endif + + return std::pair(best_0, best_1); + } +} + + +void GNGAlgorithm::resetUniformGrid(double * orig, double *axis, double l) { + ug->purge(orig, axis, l); + int maximum_index = m_g.get_maximum_index(); + + REP(i, maximum_index + 1) + { + if (m_g.existsNode(i)) + ug->insert(m_g[i].position, m_g[i].nr); + } +} + +bool GNGAlgorithm::stoppingCriterion() { + return m_g.get_number_nodes() > m_max_nodes; +} + +void GNGAlgorithm::increaseErrorNew(GNGNode * node, double error) { + fixErrorNew(node); + ASSERT(m_lambda - s <= m_betha_powers_size -1); + node->error += m_betha_powers[m_lambda - s] * error; + errorHeap.updateLazy(node->nr); +} + +void GNGAlgorithm::fixErrorNew(GNGNode * node) { + + if (node->error_cycle == c) + return; + + while(c - node->error_cycle > m_betha_powers_to_n_length - 1){ + DBG_PTR(m_logger, 5, "Recreating m_betha_powers_to_n"); + delete[] m_betha_powers_to_n; + m_betha_powers_to_n_length *= 2; + m_betha_powers_to_n = new double[m_betha_powers_to_n_length]; + REP(i, m_betha_powers_to_n_length) + m_betha_powers_to_n[i] = std::pow(m_betha, m_lambda * (double) (i)); + } + + ASSERT(c - node->error_cycle <= m_betha_powers_to_n_length -1); + + node->error = m_betha_powers_to_n[c - node->error_cycle] * node->error; + node->error_cycle = c; + +} + + +void GNGAlgorithm::set_clustering(unsigned int ex, unsigned int node_idx){ + + if(ex + 1 > clustering_result.size()){ + DBG_PTR(m_logger, 6, "Resizing clustering_result to "+to_string(g_db->size())); + clustering_result.resize(g_db->size()); + } + + //Can potentially happen in case of shrinkage of dataset size + if(ex + 1 > clustering_result.size()){ + CERR("g_db->size mismatch with ex index?\n"); + return; + } + + + clustering_result[ex] = node_idx; +} + +double GNGAlgorithm::getMaximumError() const { + double max_error = 0; + int maximum_index = m_g.get_maximum_index(); + REP(i,maximum_index+1) + if (m_g.existsNode(i)) + max_error = std::max(max_error, m_g[i].error); + return max_error; +} + +void GNGAlgorithm::decreaseAllErrorsNew() { + return; +} + +void GNGAlgorithm::decreaseErrorNew(GNGNode * node) { + fixErrorNew(node); + node->error = m_alpha * node->error; + errorHeap.updateLazy(node->nr); +} + +void GNGAlgorithm::setErrorNew(GNGNode * node, double error) { + node->error = error; + node->error_cycle = c; + errorHeap.insertLazy(node->nr); +} + +void GNGAlgorithm::increaseError(GNGNode * node, double error) { + node->error += error; +} + +void GNGAlgorithm::decreaseAllErrors() { + int maximum_index = m_g.get_maximum_index(); + REP(i,maximum_index+1) + if (m_g.existsNode(i)) + m_g[i].error = m_betha * m_g[i].error; +} + +void GNGAlgorithm::decreaseError(GNGNode * node) { + node->error = m_alpha * node->error; +} + +void GNGAlgorithm::setError(GNGNode * node, double error) { + node->error = error; +} + +// Note: this code is not optimal and is inserted only for research purposes + +double GNGAlgorithm::getUtility(int i) { + return m_g[i].utility; +} + +void GNGAlgorithm::setUtility(int i, double u) { + m_g[i].utility = u; +} + +void GNGAlgorithm::utilityCriterionCheck() { + + if (m_g.get_number_nodes() < 10) + return; //just in case + + double max_error = this->getMaximumError(); + int maximum_index = m_g.get_maximum_index(); + + double min_utility = 100000000; + int min_utility_index = -1; + + for (int i 
= 0; i <= maximum_index; ++i) + if (min_utility > getUtility(i)) { + min_utility = getUtility(i); + min_utility_index = i; + } + + if (m_g.existsNode(min_utility_index) && max_error / getUtility(min_utility_index) > m_utility_k) { + + DBG_PTR(m_logger,2, "GNGAlgorithm:: removing node with utility "+gmum::to_string(getUtility(min_utility_index)) + " max error "+gmum::to_string(max_error)); + + DBG_PTR(m_logger,2,gmum::to_string(max_error)); + + GNGNode::EdgeIterator edg = m_g[min_utility_index].begin(); + while (edg != m_g[min_utility_index].end()) { + int nr = (*edg)->nr; + edg = m_g.removeUDEdge(min_utility_index, nr); + } + + m_g.deleteNode(min_utility_index); + setUtility(min_utility_index, 0); + } + +} +void GNGAlgorithm::decreaseAllUtility() { + int maximum_index = m_g.get_maximum_index(); + for (int i = 0; i <= maximum_index; ++i) + if (m_g.existsNode(i)) + setUtility(i, getUtility(i) * (m_betha)); +} + + + + + + + + + + + + +} diff --git a/src/gng/gng_configuration.cpp b/src/gng/gng_configuration.cpp new file mode 100644 index 00000000..d7a8c61c --- /dev/null +++ b/src/gng/gng_configuration.cpp @@ -0,0 +1,194 @@ +#include "gng_configuration.h" + +void GNGConfiguration::deserialize(std::istream & in) { + ///Utility constant + in >> experimental_utility_k; + + ///Utility option. Currently supported simples utility + in >> experimental_utility_option; + + /**Maximum number of nodes*/ + in >> max_nodes; //=1000; + /**Uniform grid optimization*/ + in >> uniformgrid_optimization; //=true,lazyheap=true; + /**Lazy heap optimization*/ + in >> lazyheap_optimization; + /**Bounding box specification*/ + + /**Dimensionality of examples*/ + in >> dim; + + REPORT(dim); + + orig = vector(dim, 0); + axis = vector(dim, 0); + + for (size_t i = 0; i < dim; ++i) { + in >> axis[i] >> orig[i]; + } + /**Max edge age*/ + in >> max_age; //=200; + /**Alpha coefficient*/ + in >> alpha; //=0.95; + /**Beta coefficient*/ + in >> beta; //=0.9995; + /**Lambda coefficient*/ + in >> lambda; //=200; + /**Epsilion v. How strongly move winning node*/ + in >> eps_w; //=0.05; + /**Memory bound*/ + in >> graph_memory_bound; + /**Epsilion n*/ + in >> eps_n; //=0.0006; + + in >> verbosity; + + /**Pseudodistance function used (might be non metric)*/ + in >> distance_function; + + /**Type of used database, unsgined int for compabititlity with Rcpp**/ + in >> datasetType; + + /**Initial reserve memory for nodes */ + in >> starting_nodes; + + in >> max_iter; + } + + void GNGConfiguration::serialize(std::ostream & out) const{ + ///Utility constant + out << experimental_utility_k << endl; + + ///Utility option. Currently supported simples utility + out << experimental_utility_option << endl; + + /**Maximum number of nodes*/ + out << max_nodes << endl; //=1000; + /**Uniform grid optimization*/ + out << uniformgrid_optimization << endl; //=true,lazyheap=true; + /**Lazy heap optimization*/ + out << lazyheap_optimization << endl; + /**Bounding box specification*/ + + /**Dimensionality of examples*/ + out << dim << endl; + + REPORT("Saving: ") + REPORT(dim); + + for (size_t i = 0; i < dim; ++i) { + out << axis[i] << endl << orig[i] << endl; + } + /**Max edge age*/ + out << max_age << endl; //=200; + /**Alpha coefficient*/ + out << alpha << endl; //=0.95; + /**Beta coefficient*/ + out << beta << endl; //=0.9995; + /**Lambda coefficient*/ + out << lambda << endl; //=200; + /**Epsilion v. 
How strongly the winning node is moved*/
+	out << eps_w << endl; //=0.05;
+	/**Memory bound*/
+	out << graph_memory_bound << endl;
+	/**Epsilon n*/
+	out << eps_n << endl; //=0.0006;
+
+	out << verbosity << endl;
+
+	/**Pseudodistance function used (might be non-metric)*/
+	out << distance_function << endl;
+
+	/**Type of used database; unsigned int for compatibility with Rcpp**/
+	out << datasetType << endl;
+
+	/**Initial reserve memory for nodes */
+	out << starting_nodes << endl;
+
+	out << max_iter; // NOTE: don't put endl here, rest of the serialization is binary
+}
+
+/**Validate server configuration. (Marked as not fully working.)**/
+bool GNGConfiguration::check_correctness() {
+	if(alpha <= 0 || alpha > 1){
+		CERR("ERROR: alpha should be in range (0,1]\n");
+		return false;
+	}
+
+	if(beta <= 0 || beta > 1){
+		CERR("ERROR: beta should be in range (0,1]\n");
+		return false;
+	}
+
+	if(eps_w <= 0 || eps_w > 1){
+		CERR("ERROR: eps_w should be in range (0,1]\n");
+		return false;
+	}
+
+	if(eps_n <= 0 || eps_n > 1){
+		CERR("ERROR: eps_n should be in range (0,1]\n");
+		return false;
+	}
+
+	if(max_age <= 2 || max_age > 10000){
+		CERR("ERROR: max_age should be in range [3,10000]\n");
+		return false;
+	}
+
+	if((max_iter <= 2 && max_iter != -1) || max_nodes <= 2){
+		CERR("ERROR: max_iter and max_nodes should be in range [3,+inf] (max_iter may also be -1 for no limit)\n");
+		return false;
+	}
+
+	if(dim <= 0){
+		CERR("ERROR: incorrect dimensionality\n");
+		return false;
+	}
+
+	if(lambda <= 0 || lambda > 100000){
+		CERR("ERROR: lambda should be in range (0,100000]\n");
+		return false;
+	}
+
+	if (experimental_utility_option != UtilityOff){
+		if(uniformgrid_optimization || lazyheap_optimization) {
+			CERR("ERROR: please turn OFF optimizations when using the experimental utility option\n");
+			return false;
+		}
+	}
+
+	if (datasetType > 3 or datasetType <= 0) {
+		CERR("ERROR: wrong database type specified\n");
+		return false;
+	}
+	if (!(dim < 20 || !uniformgrid_optimization)) {
+		CERR(
+				"WARNING_LEVEL: dimensionality might be too high for OptimizedGNG. "
+				"OptimizedGNG works best for lower-dimensional datasets. "
+				"Consider using PCA or another dim. 
reduction technique" + "\n"); + + } + if (!(distance_function == gmum::GNGGraph::Euclidean + || !uniformgrid_optimization)) { + + CERR("ERROR: You can use only Euclidean distance function with uniformgrid optimization\n"); + return false; + } + if (!(!uniformgrid_optimization + or (dim == axis.size() && dim == orig.size()))) { + + CERR("ERROR: dimensionality doesn't agree with axis and orig"); + return false; + } + + return true; + } diff --git a/src/gng/gng_module.cpp b/src/gng/gng_module.cpp new file mode 100644 index 00000000..c79ec3ec --- /dev/null +++ b/src/gng/gng_module.cpp @@ -0,0 +1,94 @@ +/* + * File constructs R Interface, exports necessary classes and functions using Rcpp package + */ + +#ifdef RCPP_INTERFACE + +#include +using namespace Rcpp; + +class GNGConfiguration; +class GNGServer; + +RCPP_EXPOSED_CLASS(GNGConfiguration) +RCPP_EXPOSED_CLASS(GNGServer) + + + +#include +#include +#include +using namespace gmum; + + +GNGServer * loadFromFile(std::string filename){ + GNGServer * out = new GNGServer(filename); + return out; +} + +RCPP_MODULE(gng_module){ + //TODO: Rcpp doesn't accept dot starting name so no way to hide it easily + Rcpp::function("fromFileGNG", &loadFromFile); + + + class_("GNGConfiguration" ) + .constructor() + + .field(".uniformgrid_optimization", &GNGConfiguration::uniformgrid_optimization, "Uniform grid optimization" ) + .field(".lazyheap_optimization", &GNGConfiguration::lazyheap_optimization ) + + .field("alpha", &GNGConfiguration::alpha, "Alpha coefficient. " + "Decrease the error variables of the nodes neighboring to the newly inserted node by this fraction. Default 0.5") + .field("beta", &GNGConfiguration::beta, "Beta coefficient. " + "Decrease the error variables of all node nodes by this fraction. Forgetting rate. Default 0.99") + + .field("eps_n", &GNGConfiguration::eps_n, "How strongly move neighbour node. Default 0.0006") + .field(".experimental_utility_option", &GNGConfiguration::experimental_utility_option, "Default 0 (off). You can turn it on to 1, but remember to turn off optimizations. Likely will change in the future.") + .field(".experimental_utility_k", + &GNGConfiguration::experimental_utility_k, "Default 1.3 (note: option is off by default). ") + + .field("eps_w", &GNGConfiguration::eps_w, "How strongly move winner node. Default 0.05") + .field("max_edge_age", &GNGConfiguration::max_age, "Max edge age") + .field("dim", &GNGConfiguration::dim, "Vertex position dimensionality") + .field("lambda", &GNGConfiguration::lambda, "Every lambda iteration is added new vertex. Default 200") + .field(".dataset_type", &GNGConfiguration::datasetType, "Dataset type. 
Currently supported:" + "2: DatasetBagging - examples are sampled from dataset with equal probability, " + "3: DatasetBaggingProbability - examples are sampled with probability equal to pos_dim+vertex_dim coordinate (last number in vector)") + .field("max_nodes", &GNGConfiguration::max_nodes) + .field("verbosity", &GNGConfiguration::verbosity) + .field("seed", &GNGConfiguration::seed) + .field("max_iter", &GNGConfiguration::max_iter) + .method(".check_correctness", &GNGConfiguration::check_correctness) + .method(".set_bounding_box", &GNGConfiguration::setBoundingBox) + .method(".show", &GNGConfiguration::show); + + class_("GNGServer") + .constructor() + .method(".setVerbosity", &GNGServer::setVerbosity) + .method(".save", &GNGServer::save) + .method(".isRunning", &GNGServer::isRunning) + .method(".run", &GNGServer::run) + .method(".pause", &GNGServer::pause) + .method(".terminate", &GNGServer::terminate) + .method(".exportToGraphML", &GNGServer::exportToGraphML) + .method(".getGNGErrorIndex", &GNGServer::getGNGErrorIndex) + .method(".hasStarted", &GNGServer::hasStarted) + .method(".nodeDistance", &GNGServer::nodeDistance) + .method(".insertExamples", &GNGServer::RinsertExamples) + .method(".insertLabeledExamples", &GNGServer::RinsertLabeledExamples) + .method(".predict", &GNGServer::Rpredict) + .method(".getConfiguration", &GNGServer::getConfiguration) + .method(".getLastNodeIndex", &GNGServer::_getLastNodeIndex) + .method(".updateClustering", &GNGServer::_updateClustering) + .method("getClustering", &GNGServer::RgetClustering) + .method("getErrorStatistics", &GNGServer::RgetErrorStatistics) + .method("getCurrentIteration", &GNGServer::getCurrentIteration) + .method("getDatasetSize", &GNGServer::getDatasetSize) + .method("getNumberNodes", &GNGServer::getNumberNodes) + .method("getNode", &GNGServer::getNode) + .method("getMeanError", &GNGServer::getMeanError); +} + +#include + +#endif diff --git a/src/gng/gng_server.cpp b/src/gng/gng_server.cpp new file mode 100644 index 00000000..bc52430b --- /dev/null +++ b/src/gng/gng_server.cpp @@ -0,0 +1,494 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +GNGServer::GNGServer(std::string filename) { + std::ifstream input; + input.open(filename.c_str(), ios::in | ios::binary); + + GNGConfiguration conf; + conf.deserialize(input); + + init(conf, &input); +} + +GNGServer::GNGServer(GNGConfiguration configuration, + std::istream * input_graph) { + + + init(configuration, input_graph); +} + +void GNGServer::init(GNGConfiguration configuration, + std::istream * input_graph) { + + m_index = gng_server_count++; + + + /* If verbosity > 0 and production and using RCPP then logging to file + It is because Rcout doesn't work outside of main thread + This is first time I used nested macro ifs. 
This might be refactored to choose during creation like "log_to_file" + parameter + */ + #ifdef RCPP_INTERFACE + #ifdef DEBUG_GMUM + m_logger = boost::shared_ptr(new Logger(configuration.verbosity)); + #else + if(configuration.verbosity != 0){ + ofstream* log_file = new ofstream(); + log_file->open("gng.log"); // Could create one for each gng, but this can create massive amount of files + // TODO: There is leaked resource here + m_logger = boost::shared_ptr(new Logger(configuration.verbosity, *log_file)); + COUT("Logging to gng.log"); + } else { + m_logger = boost::shared_ptr(new Logger(configuration.verbosity)); + } + #endif + #else + m_logger = boost::shared_ptr(new Logger(configuration.verbosity)); + #endif + + algorithm_thread = 0; + m_running_thread_created = false; + + + + if(configuration.seed != -1){ + LOG_PTR(m_logger, 5, "GNGServer()::seeding to "+to_str(configuration.seed)); + __seed(configuration.seed); + } + + LOG_PTR(m_logger,5, "GNGServer()::constructing GNGServer"); + + if (!configuration.check_correctness()){ + throw invalid_argument("Invalid configuration passed to GNGServer"); + } + + this->current_configuration = configuration; //assign configuration + + + + if (current_configuration.graph_storage == GNGConfiguration::RAMMemory) { + //Nothing to do here + } else { + throw invalid_argument("Not supported GNGConfiguration type"); + } + + /** Construct database **/ + if (current_configuration.datasetType + == GNGConfiguration::DatasetSampling) { + DBG_PTR(m_logger,11, "GNGServer::Constructing Normal Sampling Prob Dataset"); + this->gngDataset = std::auto_ptr( + new GNGDatasetSimple(&database_mutex, + current_configuration.dim, true /* store_extra */, + GNGDatasetSimple::Sampling, current_configuration.seed, m_logger)); + } else if (current_configuration.datasetType + == GNGConfiguration::DatasetSamplingProb) { + //Add probability to layout + DBG_PTR(m_logger,11, "GNGServer::Constructing Sampling Prob Dataset"); + this->gngDataset = std::auto_ptr( + new GNGDatasetSimple(&database_mutex, + current_configuration.dim, true /* store_extra */, + GNGDatasetSimple::SamplingProbability, current_configuration.seed, + m_logger)); + } else if (current_configuration.datasetType + == GNGConfiguration::DatasetSeq) { + DBG_PTR(m_logger,11, "GNGServer::Constructing Normal Seq Dataset"); + this->gngDataset = std::auto_ptr( + new GNGDatasetSimple(&database_mutex, + current_configuration.dim, true /* store_extra */, + GNGDatasetSimple::Sequential, current_configuration.seed, m_logger)); + } else { + DBG_PTR(m_logger,11, "GNGServer::Not recognized dataset"); + throw BasicException( + "Database type not supported " + + to_string(current_configuration.datasetType)); + } + + DBG_PTR(m_logger,10, "GNGServer()::gngDatabase constructed"); + + /** Construct graph **/ + if (current_configuration.graph_storage == GNGConfiguration::SharedMemory) { + throw BasicException("Not supported SharedMemory configuration"); + } else if (current_configuration.graph_storage + == GNGConfiguration::RAMMemory) { + REPORT(current_configuration.starting_nodes); + + this->gngGraph = + std::auto_ptr >( + new RAMGNGGraph(&grow_mutex, + current_configuration.dim, + current_configuration.starting_nodes, + (gmum::GNGGraph::GNGDistanceFunction) current_configuration.distance_function, + m_logger)); + + } else { + throw BasicException("Not supported GNGConfiguration type"); + } + + if (input_graph) { + this->gngGraph->load(*input_graph); + } + + DBG_PTR(m_logger,10, "GNGServer()::constructing algorithm object"); + + /** 
Initiliaze main computing object **/ + this->gngAlgorithm = std::auto_ptr( + new GNGAlgorithm( + this->gngGraph.get(), //I do not want algorithm to depend on boost + this->gngDataset.get(), ¤t_configuration.orig[0], + ¤t_configuration.axis[0], + current_configuration.axis[0] * 1.1, //only 2^dim //TODO: min + current_configuration.max_nodes, + current_configuration.max_age, current_configuration.alpha, + current_configuration.beta, current_configuration.lambda, + current_configuration.eps_w, current_configuration.eps_n, + current_configuration.dim, + current_configuration.uniformgrid_optimization, + current_configuration.lazyheap_optimization, + current_configuration.experimental_utility_option, + current_configuration.experimental_utility_k, + current_configuration.max_iter, + current_configuration.seed, + m_logger)); + + DBG_PTR(m_logger,10, "GNGServer()::constructed algorithm object"); + +} + +void GNGServer::run() { + if(!algorithm_thread){ + DBG_PTR(m_logger,10, "GNGServer::runing algorithm thread"); + algorithm_thread = new gmum::gmum_thread(&GNGServer::_run, (void*) this); + DBG_PTR(m_logger,10, "GNGServer::runing collect_statistics thread"); + m_running_thread_created = true; + }else{ + gngAlgorithm->run(/*synchronized*/ true); + } +} + +GNGConfiguration GNGServer::getConfiguration() { + return current_configuration; +} + +bool GNGServer::isRunning() const { + if (!gngAlgorithm.get()) { + return false; + } + return gngAlgorithm->isRunning(); +} + +double GNGServer::nodeDistance(int id1, int id2) const { + if (gngAlgorithm->isRunning()) { + CERR("nodeDistance: Please pause algorithm before calling nodeDistance function\n"); + return -1.0; + } + if (id1 <= 0 || id2 <= 0) { + CERR("nodeDistance: Indexing starts from 1\n"); + return -1.0; + } + return gngGraph->get_dist(id1 - 1, id2 - 1); +} + +void GNGServer::save(std::string filename) { + bool wasRunning = gngAlgorithm->isRunning(); + + gngAlgorithm->pause(true); + + ASSERT(!gngAlgorithm->isRunning()); + + std::ofstream output; + output.open(filename.c_str(), ios::out | ios::binary); + + current_configuration.serialize(output); + + try { + gngGraph->lock(); + ASSERT(filename != ""); + gngGraph->serialize(output); + } catch (...) { +#ifdef DEBUG_GMUM + throw BasicException("Failed exporting to binary format\n"); +#endif + gngGraph->unlock(); //No RAII, yes.. + output.close(); + return; + } + gngGraph->unlock(); + output.close(); + + if(wasRunning){ + gngAlgorithm->run(false); + } +} + +unsigned int GNGServer::getCurrentIteration() const { + return gngAlgorithm->getIteration(); +} + +///Exports GNG state to file +void GNGServer::exportToGraphML(std::string filename) { + try { + gngGraph->lock(); + ASSERT(filename != ""); + writeToGraphML(getGraph(), filename); + } catch (...) { +#ifdef DEBUG_GMUM + throw BasicException("Failed exporting to GraphML\n"); +#endif + gngGraph->unlock(); //No RAII, yes.. 
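+		//A small RAII guard would keep the unlock on every exit path. A sketch
+		//(hypothetical helper, not part of this codebase):
+		//
+		//  struct GraphLockGuard {
+		//      gmum::GNGGraph & g;
+		//      explicit GraphLockGuard(gmum::GNGGraph & g_) : g(g_) { g.lock(); }
+		//      ~GraphLockGuard() { g.unlock(); }
+		//  };
+		//
+		//  GraphLockGuard guard(*gngGraph); //unlocks even if writeToGraphML throws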
+ return; + } + gngGraph->unlock(); +} + +///Insert examples +void GNGServer::insertExamples(double * positions, double * extra, + double * probability, unsigned int count, unsigned int dim) { + gmum::scoped_lock lock(gngDataset.get()); + + if (dim != current_configuration.dim) { + DBG_PTR(m_logger,10, "Wrong dimensionality is "+gmum::to_string(count*dim)+" expected "+ + gmum::to_string(count*gngDataset->getDataDim()) + + " data dim " + gmum::to_string(gngDataset->size())); + throw invalid_argument("Wrong dimensionality"); + } + + gngDataset->insertExamples(positions, extra, probability, count); + DBG_PTR(m_logger,7, "GNGServer::Database size "+gmum::to_string(gngDataset->size())); + +} + +unsigned int GNGServer::getNumberNodes() const { + int nr = this->gngGraph->get_number_nodes(); + return nr; +} + + + +double GNGServer::getMeanError() { + return gngAlgorithm->getMeanError(); +} + +bool GNGServer::hasStarted() const{ + return this->getCurrentIteration() != 0; +} + +vector GNGServer::getMeanErrorStatistics() { + vector > errors = + gngAlgorithm->getMeanErrorStatistics(); + vector out; + out.reserve(errors.size()); + for (unsigned i = 0; i < errors.size(); ++i) { + out.push_back(errors[i].second); + } + return out; +} + +unsigned GNGServer::getGNGErrorIndex() const{ + return gngAlgorithm->getErrorIndex(); +} + +#ifdef RCPP_INTERFACE + +void GNGServer::_updateClustering(){ + gngAlgorithm->updateClustering(); +} + +//This is tricky - used only by convertToIGraph in R, because +//it might happen that we delete nodes and have bigger index of the last node +//than actual nodes (especially in the case of utility version of GNG) +unsigned int GNGServer::_getLastNodeIndex() const { + return gngGraph->get_maximum_index() + 1; +} + +///Constructor needed for RCPPInterface +GNGServer::GNGServer(GNGConfiguration * configuration) { + init(*configuration, 0 /*input_graph*/); +} + +///Moderately slow function returning node descriptors +Rcpp::List GNGServer::getNode(int index) { + int gng_index = index - 1; //1 based + + if(index <= 0) { + CERR("Indexing of nodes starts from 1 (R convention)\n"); + List ret; + return ret; + } + + gngGraph->lock(); + + if(!gngGraph->existsNode(gng_index)) { + List ret; + return ret; + } + + GNGNode & n = getGraph()[gng_index]; + NumericVector pos(n.position, n.position + gngDataset->getGNGDim()); + + List ret; + ret["pos"] = pos; + ret["index"] = index; //useful for graph processing + ret["error"] = n.error; + ret["label"] = n.extra_data; + + if(getConfiguration().experimental_utility_option != GNGConfiguration::UtilityOff) { + ret["utility"] = n.utility; + } + + vector neigh(n.size()); + GNGNode::EdgeIterator edg = n.begin(); + unsigned i = 0; + while(edg!=n.end()) { + neigh[i++] = (*edg)->nr + 1; + ++edg; + } + + ret["neighbours"] = IntegerVector(neigh.begin(), neigh.end()); + + gngGraph->unlock(); + + return ret; +} + +int GNGServer::Rpredict(Rcpp::NumericVector & r_ex) { + if(r_ex.size() > current_configuration.dim){ + CERR("Wrong example dimensionality. 
Note that the C++ method accepts only vectors, not matrices; please use the S4 predict method instead\n");
+		return -1;
+	}else{
+		return 1+gngAlgorithm->predict(std::vector<double>(r_ex.begin(), r_ex.end()) );
+	}
+}
+
+Rcpp::NumericVector GNGServer::RgetClustering() {
+	const vector<int> & x = gngAlgorithm->get_clustering();
+	Rcpp::NumericVector out = NumericVector(x.begin(), x.end());
+	//Shift node indices to R's 1-based convention (loop body reconstructed;
+	//the original line was garbled in this diff)
+	for(size_t i=0;i<(size_t)out.size();++i) out[i] += 1;
+	return out;
+}
+
+Rcpp::NumericVector GNGServer::RgetErrorStatistics() {
+	std::vector<double> x = getMeanErrorStatistics();
+	return NumericVector(x.begin(), x.end());
+}
+
+void GNGServer::RinsertExamples(Rcpp::NumericMatrix & r_points){
+	RinsertLabeledExamples(r_points, Rcpp::NumericVector());
+}
+
+void GNGServer::RinsertLabeledExamples(Rcpp::NumericMatrix & r_points,
+		Rcpp::NumericVector r_extra ) {
+	std::vector<double> extra(r_extra.begin(), r_extra.end());
+	arma::mat * points = new arma::mat(r_points.begin(), r_points.nrow(), r_points.ncol(), false);
+
+	arma::Row<double> mean_colwise = arma::mean(*points, 0 /*dim*/);
+	arma::Row<double> std_colwise = arma::stddev(*points, 0 /*dim*/);
+	arma::Row<double> diff_std = arma::abs(std_colwise - 1.0);
+	float max_diff_std = arma::max(diff_std), max_mean = arma::max(mean_colwise);
+	if(max_diff_std > 0.1 || max_mean > 0.1) {
+		CERR("it is advised to scale the data to mean=0 and std=1 for optimal algorithm behavior\n");
+	}
+
+	//Check if data fits in bounding box
+	if(current_configuration.uniformgrid_optimization) {
+		arma::Row<double> max_colwise = arma::max(*points, 0 /*dim*/);
+		arma::Row<double> min_colwise = arma::min(*points, 0 /*dim*/);
+		arma::Row<double> diff = max_colwise - min_colwise;
+		float max = arma::max(diff), min = arma::min(diff);
+
+		//Loop header reconstructed (garbled in this diff): every feature has to
+		//fit inside the configured bounding box
+		for(size_t i=0;i<(size_t)current_configuration.dim;++i){
+			if(current_configuration.orig[i] > min_colwise[i] || current_configuration.orig[i]+current_configuration.axis[i] < max_colwise[i]) {
+				CERR("Error: each feature has to be in the range passed to gng.type.optimized\n");
+				CERR("Error: returning, did not insert examples\n");
+				return;
+			}
+		}
+	}
+
+	arma::inplace_trans( *points, "lowmem");
+
+	if(extra.size()) {
+		if(points->n_cols != extra.size()){
+			CERR("Error: please pass the same number of labels as examples\n");
+			return;
+		}
+		insertExamples(points->memptr(), &extra[0], 0 /*probability vector*/,
+				(unsigned int)points->n_cols, (unsigned int)points->n_rows);
+	} else {
+		insertExamples(points->memptr(), 0 /*extra vector*/, 0 /*probability vector*/,
+				(unsigned int)points->n_cols, (unsigned int)points->n_rows);
+	}
+
+	arma::inplace_trans( *points, "lowmem");
+
+	if(!isRunning()){
+		run();
+	}
+}
+
+#endif
+
+///Pause algorithm
+void GNGServer::pause() {
+	if(gngAlgorithm.get()){
+		//Pausing an object that is not running is a harmless no-op
+		gngAlgorithm->pause(/*synchronized*/ true);
+	}
+}
+
+///Terminate algorithm
+void GNGServer::terminate() {
+	if(gngAlgorithm.get()){
+		LOG_PTR(m_logger,3, "GNGServer::getAlgorithm terminating");
+		gngAlgorithm->terminate(/*synchronized*/true);
+	}
+}
+
+GNGAlgorithm & GNGServer::getAlgorithm() {
+	return *this->gngAlgorithm.get();
+}
+GNGGraph & GNGServer::getGraph() {
+	return *this->gngGraph.get();
+}
+GNGDataset & GNGServer::getDatabase() {
+	return *this->gngDataset.get();
+}
+
+GNGServer::~GNGServer() {
+	LOG_PTR(m_logger, 5, "GNGServer::destructor for "+to_str(m_index)+" called");
+
+	if(gngAlgorithm.get()){
+		terminate();
+	}
+
+	LOG_PTR(m_logger, 5, "GNGServer::joining to algorithm_thread");
+
+	if(algorithm_thread){
+		algorithm_thread->join();
+	}
+
+	LOG_PTR(m_logger, 5, "GNGServer::destructor for "+to_str(m_index)+" finished");
+}
+
+unsigned GNGServer::getDatasetSize() const{
+	if(gngDataset.get()){
+		gmum::scoped_lock 
db_lock(*gngDataset.get()); + return gngDataset->size(); + }else{ + return 0; + } +} diff --git a/src/hello_gmum.cpp b/src/hello_gmum.cpp deleted file mode 100644 index 24be6d4f..00000000 --- a/src/hello_gmum.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include "hello_gmum.h" - -SEXP hello_gmum(){ - using namespace Rcpp ; - - CharacterVector x = CharacterVector::create( "hello", "gmum" ) ; - - return x ; -} diff --git a/src/hello_gmum.h b/src/hello_gmum.h deleted file mode 100755 index ae0fe1a7..00000000 --- a/src/hello_gmum.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef _gmum-r_RCPP_HELLO_WORLD_H -#define _gmum-r_RCPP_HELLO_WORLD_H - -#include - -/* - * note : RcppExport is an alias to `extern "C"` defined by Rcpp. - * - * It gives C calling convention to the rcpp_hello_world function so that - * it can be called from .Call in R. Otherwise, the C++ compiler mangles the - * name of the function and .Call can't find it. - * - * It is only useful to use RcppExport when the function is intended to be called - * by .Call. See the thread http://thread.gmane.org/gmane.comp.lang.r.rcpp/649/focus=672 - * on Rcpp-devel for a misuse of RcppExport - */ -RcppExport SEXP hello_gmum() ; - -#endif diff --git a/src/packThis.sh b/src/packThis.sh new file mode 100755 index 00000000..27b29753 --- /dev/null +++ b/src/packThis.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +# TODO: add checking for that +echo "Remember to run this script from root folder" + +echo $# +if [ "$#" -ne 1 ]; then + destination="pkg" +else + destination=$1 +fi + +echo "Packing to ", $destination + +echo "Creating folder structure" +mkdir -p $destination + +echo "Copy necessary files" +rsync -rP --exclude=".*" --exclude="demo/samples/*" --exclude="inst/data_sets/*" --exclude="cmake/*" --exclude="build/*" --exclude="doc/*" --exclude="libs/*" --exclude="tests/*" --exclude="..Rcheck/*" --include="*.cpp" --include="*.h" --include="*.hpp" --include="*.Rd" --include="*.c" --include="*.R" --include="*/" --exclude="*" --exclude="pkg/*" --exclude="pkg" . 
$destination + +# Copy datasets +mkdir $destination/data +cp data/cec.mouse1.spherical.RData $destination/data +cp data/cec.tset.RData $destination/data +cp data/cec.ellipsegauss.RData $destination/data +cp data/svm.transduction.RData $destination/data +cp data/svm_breast_cancer_dataset.RData $destination/data + +# Some individual files +cp DESCRIPTION $destination +cp LICENSE $destination +cp NAMESPACE $destination +cp demo/00Index $destination/demo +cp src/Makevars $destination/src +cp src/Makevars.win $destination/src +cp inst/include/svmlight/Makefile $destination/inst/include/svmlight/ +cp inst/include/svmlight/LICENSE.txt $destination/inst/include/svmlight/ + +# TODO: this is scary, but rsync refuses to stop copying hidden files +# NOTE: be super, super scared of modyfing next line, and don't add -f by no means +# we all remember Steam bug, lol +rm -r $destination/..Rcheck +rm -r $destination/.git +rm -r $destination/.idea +rm -r pkg/pkg +rm -r pkg/doc +rm -r pkg/cmake +rm -r pkg/libs diff --git a/src/root.cpp b/src/root.cpp new file mode 100644 index 00000000..e69de29b diff --git a/src/svm/libsvm_runner.cpp b/src/svm/libsvm_runner.cpp new file mode 100644 index 00000000..65c4b45e --- /dev/null +++ b/src/svm/libsvm_runner.cpp @@ -0,0 +1,524 @@ +/* + * LibSVMRunner.cpp + * + * Created on: Apr 7, 2014 + * Author: sacherus + */ + +#include +#include +#include +#include +#include + +#include "libsvm_runner.h" +#include "svm_basic.h" +#include "svm_utils.h" +#include "utils/cutils.h" +#include "utils/utils.h" + +#define Malloc(type,n) (type *)malloc((n)*sizeof(type)) +svm_parameter get_default_params(); + + +LibSVMRunner::LibSVMRunner() { + // TODO Auto-generated constructor stub +} + +LibSVMRunner::~LibSVMRunner() { + // TODO Auto-generated destructor stub +} + +void LibSVMRunner::processRequest(SVMConfiguration& config) { + +// Training + if (!config.isPrediction()) { + svm_node** node = 0; + if(config.isSparse()) { + node = ArmaSpMatToSvmNode(config.sparse_data); + } else { + node = armatlib(config.data); + } + svm_parameter* param = configuration_to_problem(config); + parseCommandLine(config, *param); + prob.l = config.target.n_rows; + prob.y = vectlib(config.target); + prob.x = node; + save_model_to_config(config, param, prob); + config.w = (config.support_vectors * config.alpha_y); + + } else { + arma_prediction(config); + } +} + +bool LibSVMRunner::canHandle(SVMConfiguration& config) { + if (config.use_example_weights) { + return false; + } + return config.library == LIBSVM; +} + +bool LibSVMRunner::save_model_to_config(SVMConfiguration& config, + svm_parameter* param, svm_problem& problem) { + + const char *error_msg; + + error_msg = svm_check_parameter(&prob, param, config.log); + + if (error_msg) { + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string(error_msg)) + return false; + } + //int* nr = Malloc(int, 1); + int* nclasses = Malloc(int, 1); + + model = svm_train(&prob, param, config.log); + //*nr = config.support_vectors.n_rows; //support vectors + *nclasses = model->nr_class; + config.nr_class = model->nr_class; + LOG(config.log, LogLevel::TRACE_LEVEL, "save_model_to_config writing down alphas, nclasses= " + svm_to_str(config.nr_class)); + + int nr_support_vectors = model->l; + //conversion vec->SpCol + arma::vec alpha_y_tmp = arma::vec(model->sv_coef[0], nr_support_vectors); + //not my fault. 
Arma fault :) + config.alpha_y = arma::zeros(nr_support_vectors); + for(int i=0;irho[0]; + config.iter = model->iter; + // memcpy(config.rho, , + // config.nr_class * (config.nr_class - 1) / 2 * sizeof(double)); + + //config.sv_indices = (int*) malloc(config.l * sizeof(int)); + //svm_get_sv_indices(model, config.sv_indices, config.log); + + int dim = config.getDataDim(); + ASSERT(dim > 0); + //config.support_vectors = SvmUtils::libtoarma(model->SV, nr_support_vectors, dim); + // + LOG(config.log, LogLevel::TRACE_LEVEL, "save_model_to_config writing down SV, n_SV = " + svm_to_str(nr_support_vectors)); + config.support_vectors = SvmUtils::SvmNodeToArmaSpMat(model->SV, nr_support_vectors, dim); + LOG(config.log, LogLevel::TRACE_LEVEL, "save_model_to_config wrote down SV, n_SV = " + svm_to_str(config.support_vectors.n_cols)); + LOG(config.log, LogLevel::TRACE_LEVEL, "save_model_to_config wrote down SV, dim = " + svm_to_str(config.support_vectors.n_rows)); + + // TODO: WTF!!!!!??? + if (config.svm_type < 2) { + config.label = (int *) malloc(*nclasses * sizeof(int)); + config.nSV = (int *) malloc(*nclasses * sizeof(int)); + memcpy(config.label, model->label, *nclasses * sizeof(int)); + memcpy(config.nSV, model->nSV, *nclasses * sizeof(int)); + } + + config.neg_target = model->label[1]; + config.pos_target = model->label[0]; + + svm_destroy_param(param,config.log); + svm_free_and_destroy_model(&model,config.log); + + return true; +} + +svm_model* LibSVMRunner::load_model_from_config(SVMConfiguration& config, + svm_parameter* param) { + + const char *error_msg; + error_msg = svm_check_parameter(&prob, param,config.log); + + if (error_msg) { + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string(error_msg)) + return 0; + } + + model = Malloc(svm_model, 1); + + model->l = config.getSVCount(); //support vectors number + model->nr_class = config.nr_class; + model->param = *param; + + model->sv_coef = (double **) malloc(model->nr_class * sizeof(double*)); + for (int i = 0; i < config.nr_class - 1; i++) { + model->sv_coef[i] = (double *) malloc(config.getSVCount() * sizeof(double)); + std::copy(config.alpha_y.begin(), config.alpha_y.end(), model->sv_coef[i * config.getSVCount()]); + } + + model->SV = armatlib(arma::mat(config.support_vectors.t())); + // FIXME: Why below is not working? 
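+	//One plausible explanation (assumption, not verified): ArmaSpMatToSvmNode
+	//allocates each column with new[], while a model loaded here sets
+	//free_sv = 1 and is later released through free() in
+	//svm_free_and_destroy_model, so the allocators would not match.
+	//armatlib(), used above, builds the malloc()-ed, sentinel-terminated
+	//layout libsvm expects, e.g. for one example:
+	//
+	//  { {1, x1}, {4, x4}, ..., {-1, 0} }  //1-based feature indices, -1 ends the row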
+ //model->SV = ArmaSpMatToSvmNode(config.support_vectors); + + model->rho = (double *) malloc( + config.nr_class * (config.nr_class - 1) / 2 * sizeof(double)); + + //i need change sign in b + double local_rho = -config.b; + + if(config.nr_class != 2) { + throw std::invalid_argument( "Code is not implemented for more than 2 classes right now"); + } + + memcpy(model->rho, &local_rho, + config.nr_class * (config.nr_class - 1) / 2 * sizeof(double)); + + model->free_sv = 1; + + + if (config.svm_type < 2) { + model->label = (int *) malloc(config.nr_class * sizeof(int)); + model->nSV = (int *) malloc(config.nr_class * sizeof(int)); + memcpy(model->label, config.label, config.nr_class * sizeof(int)); + memcpy(model->nSV, config.nSV, config.nr_class * sizeof(int)); + } + + return model; +} + +svm_parameter* LibSVMRunner::configuration_to_problem( + SVMConfiguration& config) { + svm_parameter* param; + param = Malloc(svm_parameter, 1); + param->svm_type = config.svm_type; + // param->kernel_type = config.kernel_type; + param->degree = config.degree; + param->gamma = config.gamma; // 1/num_features + param->coef0 = config.coef0; + param->nu = config.nu; + param->cache_size = config.cache_size; + param->C = config.C; + param->eps = config.eps; + param->p = config.p; + param->shrinking = config.shrinking; + param->probability = config.probability; + param->nr_weight = config.class_weight_length; + param->weight_label = config.libsvm_class_weights_labels; + param->weight = config.libsvm_class_weights; + param->max_iter = config.max_iter; + + if ( config.kernel_type == _LINEAR ) { + param->kernel_type = LINEAR; + } + else if ( config.kernel_type == _POLY ) { + param->kernel_type = POLY; + } + else if ( config.kernel_type == _RBF ) { + param->kernel_type = RBF; + } + else if ( config.kernel_type == _SIGMOID ) { + param->kernel_type = SIGMOID; + } + return param; +} + +/* + * Prediction + */ + +struct svm_node *x; +int max_nr_attr = 64; + +int predict_probability = 0; + +/* + struct svm_model* model; + static char *line = NULL; + static int max_line_len; + */ + +/* + * Armadillo matrix format to libsvm format + */ +struct svm_node ** LibSVMRunner::armatlib(arma::mat x) { + int r = x.n_rows; + int c = x.n_cols; + struct svm_node** sparse; + int i, ii, count; + + sparse = (struct svm_node **) malloc(r * sizeof(struct svm_node *)); + /* iterate over rows */ + for (i = 0; i < r; i++) { + /* determine nr. 
of non-zero elements */ + /* iterate over columns */ + for (count = ii = 0; ii < c; ii++) + if (x(i, ii) != 0) + count++; + + /* allocate memory for column elements */ + sparse[i] = (struct svm_node *) malloc( + (count + 1) * sizeof(struct svm_node)); + + /* set column elements */ + for (count = ii = 0; ii < c; ii++) + if (x(i, ii) != 0) { + sparse[i][count].index = ii + 1; + sparse[i][count].value = x(i, ii); + count++; + } + + /* set termination element */ + sparse[i][count].index = -1; + } + + return sparse; +} + +/* + * Vector with target to lisvm format + */ +double * LibSVMRunner::vectlib(arma::vec target) { + double* return_target; + return_target = Malloc(double, target.n_rows); + for (unsigned int i = 0; i < target.n_rows; i++) { + return_target[i] = target(i); + } + return return_target; +} + +svm_node** LibSVMRunner::SparseToSVMNode(arma::vec& x, int r, arma::Col& rowindex, arma::Col& colindex) { + struct svm_node** sparse; + int i, ii, count = 0, nnz = 0; + + sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node*)); + for (i = 0; i < r; i++) { + /* allocate memory for column elements */ + nnz = rowindex[i+1] - rowindex[i]; + sparse[i] = (struct svm_node *) malloc ((nnz + 1) * sizeof(struct svm_node)); + + /* set column elements */ + for (ii = 0; ii < nnz; ii++) { + sparse[i][ii].index = colindex[count]; + sparse[i][ii].value = x[count]; + count++; + } + + /* set termination element */ + sparse[i][ii].index = -1; + } + + return sparse; +} + + +void LibSVMRunner::arma_prediction(SVMConfiguration& config) { + struct svm_model* m; + struct svm_node ** train; + svm_parameter *params; + int training_examples = config.getDataExamplesNumber(); + + params = configuration_to_problem(config); + m = load_model_from_config(config, params); + +// TODO: READ MODEL FROM PARAMETERS + if(config.isSparse()) { + train = ArmaSpMatToSvmNode(config.sparse_data); + } else { + train = armatlib(config.data); + } + double* ret = Malloc(double, training_examples); + + for (int i = 0; i < training_examples; i++) + ret[i] = svm_predict(m, train[i],config.log); + + arma::vec ret_vec(ret, training_examples); + config.result = ret_vec; + /* TODO: CLEAN MEMORY IN BETTER WAY THINK OF OTHER PARAMETERS + * Clean memory: + * -array matrix + * -model + */ + for (int i = 0; i < training_examples; i++) + free(train[i]); + free(train); + //TODO: THIS SHOULD WORK WITH PREDICTIONS 2X, now it's not working +// svm_free_and_destroy_model(&m); + svm_destroy_param(params,config.log); + free(ret); +} + +svm_node** LibSVMRunner::ArmaSpMatToSvmNode(arma::sp_mat & sparse_data) { + int max_rows = sparse_data.n_rows + 1; + svm_node **sn = new svm_node*[sparse_data.n_cols + 1]; + svm_node * tmp_col = new svm_node[max_rows]; + long int current_col_counter; + long int row; + for (unsigned int col = 0; col < sparse_data.n_cols; ++col) { + current_col_counter = 0; + row = -1; + + for ( + arma::sp_mat::iterator it = sparse_data.begin_col(col); + it != sparse_data.end_col(col); ++it + ) { + tmp_col[current_col_counter].value = *it; + tmp_col[current_col_counter++].index = it.row() + 1; + } + + sn[col] = new svm_node[current_col_counter + 1]; + memcpy(sn[col], tmp_col, current_col_counter * sizeof(svm_node)); + sn[col][current_col_counter].index = -1.0; + } + delete [] tmp_col; + return sn; +} + +void LibSVMRunner::parseCommandLine( + SVMConfiguration& config, svm_parameter& param +) { + int argc = 0; + char** argv = 0; + + if (!config.svm_options.empty()) { + argc = check_argc(std::string("gmum ") + config.svm_options); + argv 
= to_argv(std::string("gmum ") + config.svm_options); + char input_file_name[1024]; + char model_file_name[1024]; + + LibSVMRunner::libraryParseCommandLine( + config, param, argc, argv, input_file_name, model_file_name); + } +} + +void LibSVMRunner::libraryParseCommandLine( + SVMConfiguration& config, + svm_parameter& param, + int argc, + char** argv, + char* input_file_name, + char* model_file_name +) { + int i; + void (*print_func)(const char*) = NULL; // default printing to stdout + + // default values + /* These are being handled in SVMClient + param.svm_type = C_SVC; + param.kernel_type = RBF; + param.degree = 3; + param.gamma = 0; // 1/num_features + param.coef0 = 0; + param.nu = 0.5; + param.cache_size = 100; + param.C = 1; + param.eps = 1e-3; + param.p = 0.1; + param.shrinking = 1; + param.probability = 0; + param.nr_weight = 0; + param.weight_label = NULL; + param.weight = NULL; + cross_validation = 0; + */ + + // parse options + for(i=1;i=argc) + break; + //exit_with_help(); + switch(argv[i-1][1]) + { + case 's': + param.svm_type = atoi(argv[i]); + break; + case 't': + param.kernel_type = atoi(argv[i]); + break; + case 'd': + param.degree = atoi(argv[i]); + break; + case 'g': + param.gamma = atof(argv[i]); + break; + case 'r': + param.coef0 = atof(argv[i]); + break; + case 'n': + param.nu = atof(argv[i]); + break; + case 'm': + param.cache_size = atof(argv[i]); + break; + case 'c': + param.C = atof(argv[i]); + break; + case 'e': + param.eps = atof(argv[i]); + break; + case 'p': + param.p = atof(argv[i]); + break; + case 'h': + param.shrinking = atoi(argv[i]); + break; + case 'b': + param.probability = atoi(argv[i]); + break; + case 'q': + //print_func = &print_null; + i--; + break; + case 'v': + // TODO: We need to wrap more functions from svm-train.c + LOG( + config.log, + LogLevel::ERR_LEVEL, + "-v n: n-fold cross validation mode: not implemented." 
+ ); + break; + /* + cross_validation = 1; + nr_fold = atoi(argv[i]); + if(nr_fold < 2) + { + C_FPRINTF(stderr,"n-fold cross validation: n must >= 2\n"); + //exit_with_help(); + } + break; + */ + case 'w': + ++param.nr_weight; + param.weight_label = (int *)realloc( + param.weight_label,sizeof(int)*param.nr_weight); + param.weight = (double *)realloc( + param.weight,sizeof(double)*param.nr_weight); + param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]); + param.weight[param.nr_weight-1] = atof(argv[i]); + break; + default: + C_FPRINTF(stderr,"Unknown option: -%c\n", argv[i-1][1]); + //exit_with_help(); + } + } + + //svm_set_print_string_function(print_func); + + // determine filenames + // FIXME: Decide what TODO with this code + + /* + if(i>=argc) + exit_with_help(); + + strcpy(input_file_name, argv[i]); + + if(i +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "svm.h" +#include "svm_utils.h" +#include "svm_basic.h" +#include "utils/cutils.h" +#include "utils/utils.h" + +#ifdef RCPP_INTERFACE +#include +#endif + +int libsvm_version = LIBSVM_VERSION; +typedef float Qfloat; +typedef signed char schar; +//#ifndef min +//template static inline T min(T x,T y) { return (x static inline T max(T x,T y) { return (x>y)?x:y; } +//#endif +//template static inline void swap(T& x, T& y) { T t=x; x=y; y=t; } +template static inline void clone(T*& dst, S* src, int n) +{ + dst = new T[n]; + memcpy((void *)dst,(void *)src,sizeof(T)*n); +} +static inline double powi(double base, int times) +{ + double tmp = base, ret = 1.0; + + for(int t=times; t>0; t/=2) + { + if(t%2==1) ret*=tmp; + tmp = tmp * tmp; + } + return ret; +} +#define INF HUGE_VAL +#define TAU 1e-12 +#define Malloc(type,n) (type *)malloc((n)*sizeof(type)) + +// +// Kernel Cache +// +// l is the number of total data items +// size is the cache size limit in bytes +// +class Cache +{ +public: + Cache(int l,long int size); + ~Cache(); + + // request data [0,len) + // return some position p where [p,len) need to be filled + // (p >= len if nothing needs to be filled) + int get_data(const int index, Qfloat **data, int len); + void swap_index(int i, int j); +private: + int l; + long int size; + struct head_t + { + head_t *prev, *next; // a circular list + Qfloat *data; + int len; // data[0,len) is cached in this entry + }; + + head_t *head; + head_t lru_head; + void lru_delete(head_t *h); + void lru_insert(head_t *h); +}; + +Cache::Cache(int l_,long int size_):l(l_),size(size_) +{ + head = (head_t *)calloc(l,sizeof(head_t)); // initialized to 0 + size /= sizeof(Qfloat); + size -= l * sizeof(head_t) / sizeof(Qfloat); + size = max(size, 2 * (long int) l); // cache must be large enough for two columns + lru_head.next = lru_head.prev = &lru_head; +} + +Cache::~Cache() +{ + for(head_t *h = lru_head.next; h != &lru_head; h=h->next) + free(h->data); + free(head); +} + +void Cache::lru_delete(head_t *h) +{ + // delete from current location + h->prev->next = h->next; + h->next->prev = h->prev; +} + +void Cache::lru_insert(head_t *h) +{ + // insert to last position + h->next = &lru_head; + h->prev = lru_head.prev; + h->prev->next = h; + h->next->prev = h; +} + +int Cache::get_data(const int index, Qfloat **data, int len) +{ + head_t *h = &head[index]; + if(h->len) lru_delete(h); + int more = len - h->len; + + if(more > 0) + { + // free old space + while(size < more) + { + head_t *old = lru_head.next; + lru_delete(old); + free(old->data); + size += old->len; + old->data = 0; + old->len = 0; + } + + // 
allocate new space + h->data = (Qfloat *)realloc(h->data,sizeof(Qfloat)*len); + size -= more; + swap(h->len,len); + } + + lru_insert(h); + *data = h->data; + return len; +} + +void Cache::swap_index(int i, int j) +{ + if(i==j) return; + + if(head[i].len) lru_delete(&head[i]); + if(head[j].len) lru_delete(&head[j]); + swap(head[i].data,head[j].data); + swap(head[i].len,head[j].len); + if(head[i].len) lru_insert(&head[i]); + if(head[j].len) lru_insert(&head[j]); + + if(i>j) swap(i,j); + for(head_t *h = lru_head.next; h!=&lru_head; h=h->next) + { + if(h->len > i) + { + if(h->len > j) + swap(h->data[i],h->data[j]); + else + { + // give up + lru_delete(h); + free(h->data); + size += h->len; + h->data = 0; + h->len = 0; + } + } + } +} + +// +// Kernel evaluation +// +// the static method k_function is for doing single kernel evaluation +// the constructor of Kernel prepares to calculate the l*l kernel matrix +// the member function get_Q is for getting one column from the Q Matrix +// +class QMatrix { +public: + virtual Qfloat *get_Q(int column, int len) const = 0; + virtual double *get_QD() const = 0; + virtual void swap_index(int i, int j) const = 0; + virtual ~QMatrix() {} +}; + +class Kernel: public QMatrix { +public: + Kernel(int l, svm_node * const * x, const svm_parameter& param); + virtual ~Kernel(); + + static double k_function(const svm_node *x, const svm_node *y, + const svm_parameter& param); + virtual Qfloat *get_Q(int column, int len) const = 0; + virtual double *get_QD() const = 0; + virtual void swap_index(int i, int j) const // no so const... + { + swap(x[i],x[j]); + if(x_square) swap(x_square[i],x_square[j]); + } +protected: + + double (Kernel::*kernel_function)(int i, int j) const; + +private: + const svm_node **x; + double *x_square; + + // svm_parameter + const int kernel_type; + const int degree; + const double gamma; + const double coef0; + + static double dot(const svm_node *px, const svm_node *py); + double kernel_linear(int i, int j) const + { + return dot(x[i],x[j]); + } + double kernel_poly(int i, int j) const + { + return powi(gamma*dot(x[i],x[j])+coef0,degree); + } + double kernel_rbf(int i, int j) const + { + return exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); + } + double kernel_sigmoid(int i, int j) const + { + return tanh(gamma*dot(x[i],x[j])+coef0); + } + double kernel_precomputed(int i, int j) const + { + return x[i][(int)(x[j][0].value)].value; + } +}; + +Kernel::Kernel(int l, svm_node * const * x_, const svm_parameter& param) +:kernel_type(param.kernel_type), degree(param.degree), + gamma(param.gamma), coef0(param.coef0) +{ + switch(kernel_type) + { + case LINEAR: + kernel_function = &Kernel::kernel_linear; + break; + case POLY: + kernel_function = &Kernel::kernel_poly; + break; + case RBF: + kernel_function = &Kernel::kernel_rbf; + break; + case SIGMOID: + kernel_function = &Kernel::kernel_sigmoid; + break; + case PRECOMPUTED: + kernel_function = &Kernel::kernel_precomputed; + break; + } + + clone(x,x_,l); + + if(kernel_type == RBF) + { + x_square = new double[l]; + for(int i=0;iindex != -1 && py->index != -1) + { + if(px->index == py->index) + { + sum += px->value * py->value; + ++px; + ++py; + } + else + { + if(px->index > py->index) + ++py; + else + ++px; + } + } + return sum; +} + +double Kernel::k_function(const svm_node *x, const svm_node *y, + const svm_parameter& param) +{ + switch(param.kernel_type) + { + case LINEAR: + return dot(x,y); + case POLY: + return powi(param.gamma*dot(x,y)+param.coef0,param.degree); + case RBF: + { + double 
sum = 0; + while(x->index != -1 && y->index !=-1) + { + if(x->index == y->index) + { + double d = x->value - y->value; + sum += d*d; + ++x; + ++y; + } + else + { + if(x->index > y->index) + { + sum += y->value * y->value; + ++y; + } + else + { + sum += x->value * x->value; + ++x; + } + } + } + + while(x->index != -1) + { + sum += x->value * x->value; + ++x; + } + + while(y->index != -1) + { + sum += y->value * y->value; + ++y; + } + + return exp(-param.gamma*sum); + } + case SIGMOID: + return tanh(param.gamma*dot(x,y)+param.coef0); + case PRECOMPUTED: //x: test (validation), y: SV + return x[(int)(y->value)].value; + default: + return 0; // Unreachable + } +} + +// An SMO algorithm in Fan et al., JMLR 6(2005), p. 1889--1918 +// Solves: +// +// min 0.5(\alpha^T Q \alpha) + p^T \alpha +// +// y^T \alpha = \delta +// y_i = +1 or -1 +// 0 <= alpha_i <= Cp for y_i = 1 +// 0 <= alpha_i <= Cn for y_i = -1 +// +// Given: +// +// Q, p, y, Cp, Cn, and an initial feasible point \alpha +// l is the size of vectors and matrices +// eps is the stopping tolerance +// +// solution will be put in \alpha, objective value will be put in obj +// +class Solver { +public: + Solver() {}; + virtual ~Solver() {}; + + struct SolutionInfo { + double obj; + double rho; + double upper_bound_p; + double upper_bound_n; + bool solve_timed_out; // gmum.r modification for reaching mac_iter + int iter; + double r; // for Solver_NU + }; + + void Solve(int l, const QMatrix& Q, const double *p_, const schar *y_, + double *alpha_, double Cp, double Cn, double eps, + SolutionInfo* si, int shrinking, Logger &log, int max_iter); +protected: + int active_size; + schar *y; + double *G; // gradient of objective function + enum { LOWER_BOUND, UPPER_BOUND, FREE }; + char *alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE + double *alpha; + const QMatrix *Q; + const double *QD; + double eps; + double Cp,Cn; + double *p; + int *active_set; + double *G_bar; // gradient, if we treat free variables as 0 + int l; + bool unshrink; // XXX + + double get_C(int i) + { + return (y[i] > 0)? 
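+		//Per-class box constraint: positive examples are capped by Cp and
+		//negative ones by Cn, which is how asymmetric class weighting
+		//enters the SMO solver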
Cp : Cn; + } + void update_alpha_status(int i) + { + if(alpha[i] >= get_C(i)) + alpha_status[i] = UPPER_BOUND; + else if(alpha[i] <= 0) + alpha_status[i] = LOWER_BOUND; + else alpha_status[i] = FREE; + } + bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } + bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } + bool is_free(int i) { return alpha_status[i] == FREE; } + void swap_index(int i, int j); + void reconstruct_gradient(Logger &log); + virtual int select_working_set(int &i, int &j); + virtual double calculate_rho(); + virtual void do_shrinking(Logger &log); +private: + bool be_shrunk(int i, double Gmax1, double Gmax2); +}; + +void Solver::swap_index(int i, int j) +{ + Q->swap_index(i,j); + swap(y[i],y[j]); + swap(G[i],G[j]); + swap(alpha_status[i],alpha_status[j]); + swap(alpha[i],alpha[j]); + swap(p[i],p[j]); + swap(active_set[i],active_set[j]); + swap(G_bar[i],G_bar[j]); +} + +void Solver::reconstruct_gradient(Logger &log) +{ + // reconstruct inactive elements of G from G_bar and free variables + + if(active_size == l) return; + + int i,j; + int nr_free = 0; + + for(j=active_size;j 2*active_size*(l-active_size)) + { + for(i=active_size;iget_Q(i,active_size); + for(j=0;jget_Q(i,l); + double alpha_i = alpha[i]; + for(j=active_size;jl = l; + this->Q = &Q; + QD=Q.get_QD(); + clone(p, p_,l); + clone(y, y_,l); + clone(alpha,alpha_,l); + this->Cp = Cp; + this->Cn = Cn; + this->eps = eps; + unshrink = false; + + // initialize alpha_status + { + alpha_status = new char[l]; + for(int i=0;iINT_MAX/100 ? INT_MAX : 100*l); + int counter = min(l,1000)+1; + + while(1) + { + + // set max_iter to -1 to disable the mechanism + if ((max_iter != -1) && (iter >= max_iter)) { + LOG(log, LogLevel::WARNING_LEVEL, "WARNING_LEVEL: reaching max number of iterations"); + si->solve_timed_out = true; + break; + + } + // show progress and do shrinking + + if(--counter == 0) + { + counter = min(l,1000); + if(shrinking) do_shrinking(log); + } + + int i,j; + if(select_working_set(i,j)!=0) + { + // reconstruct the whole gradient + reconstruct_gradient(log); + // reset active set size and check + active_size = l; + if(select_working_set(i,j)!=0) + break; + else + counter = 1; // do shrinking next iteration + } + + ++iter; + + // update alpha[i] and alpha[j], handle bounds carefully + + const Qfloat *Q_i = Q.get_Q(i,active_size); + const Qfloat *Q_j = Q.get_Q(j,active_size); + + double C_i = get_C(i); + double C_j = get_C(j); + + double old_alpha_i = alpha[i]; + double old_alpha_j = alpha[j]; + + if(y[i]!=y[j]) + { + double quad_coef = QD[i]+QD[j]+2*Q_i[j]; + if (quad_coef <= 0) + quad_coef = TAU; + double delta = (-G[i]-G[j])/quad_coef; + double diff = alpha[i] - alpha[j]; + alpha[i] += delta; + alpha[j] += delta; + + if(diff > 0) + { + if(alpha[j] < 0) + { + alpha[j] = 0; + alpha[i] = diff; + } + } + else + { + if(alpha[i] < 0) + { + alpha[i] = 0; + alpha[j] = -diff; + } + } + if(diff > C_i - C_j) + { + if(alpha[i] > C_i) + { + alpha[i] = C_i; + alpha[j] = C_i - diff; + } + } + else + { + if(alpha[j] > C_j) + { + alpha[j] = C_j; + alpha[i] = C_j + diff; + } + } + } + else + { + double quad_coef = QD[i]+QD[j]-2*Q_i[j]; + if (quad_coef <= 0) + quad_coef = TAU; + double delta = (G[i]-G[j])/quad_coef; + double sum = alpha[i] + alpha[j]; + alpha[i] -= delta; + alpha[j] += delta; + + if(sum > C_i) + { + if(alpha[i] > C_i) + { + alpha[i] = C_i; + alpha[j] = sum - C_i; + } + } + else + { + if(alpha[j] < 0) + { + alpha[j] = 0; + alpha[i] = sum; + } + } + if(sum > C_j) + { + if(alpha[j] > 
C_j) + { + alpha[j] = C_j; + alpha[i] = sum - C_j; + } + } + else + { + if(alpha[i] < 0) + { + alpha[i] = 0; + alpha[j] = sum; + } + } + } + + // update G + + double delta_alpha_i = alpha[i] - old_alpha_i; + double delta_alpha_j = alpha[j] - old_alpha_j; + + for(int k=0;krho = calculate_rho(); + + // calculate objective value + { + double v = 0; + int i; + for(i=0;iobj = v/2; + } + + // put back the solution + { + for(int i=0;iupper_bound_p = Cp; + si->upper_bound_n = Cn; + + //gmum.r -> sacherus + si->iter =iter; + LOG(log, LogLevel::INFO_LEVEL, "optimization finished, #iter = " + to_string(iter)); + + delete[] p; + delete[] y; + delete[] alpha; + delete[] alpha_status; + delete[] active_set; + delete[] G; + delete[] G_bar; +} + +// return 1 if already optimal, return 0 otherwise +int Solver::select_working_set(int &out_i, int &out_j) +{ + // return i,j such that + // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) + // j: minimizes the decrease of obj value + // (if quadratic coefficeint <= 0, replace it with tau) + // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) + + double Gmax = -INF; + double Gmax2 = -INF; + int Gmax_idx = -1; + int Gmin_idx = -1; + double obj_diff_min = INF; + + for(int t=0;t= Gmax) + { + Gmax = -G[t]; + Gmax_idx = t; + } + } + else + { + if(!is_lower_bound(t)) + if(G[t] >= Gmax) + { + Gmax = G[t]; + Gmax_idx = t; + } + } + + int i = Gmax_idx; + const Qfloat *Q_i = NULL; + if(i != -1) // NULL Q_i not accessed: Gmax=-INF if i=-1 + Q_i = Q->get_Q(i,active_size); + + for(int j=0;j= Gmax2) + Gmax2 = G[j]; + if (grad_diff > 0) + { + double obj_diff; + double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j]; + if (quad_coef > 0) + obj_diff = -(grad_diff*grad_diff)/quad_coef; + else + obj_diff = -(grad_diff*grad_diff)/TAU; + + if (obj_diff <= obj_diff_min) + { + Gmin_idx=j; + obj_diff_min = obj_diff; + } + } + } + } + else + { + if (!is_upper_bound(j)) + { + double grad_diff= Gmax-G[j]; + if (-G[j] >= Gmax2) + Gmax2 = -G[j]; + if (grad_diff > 0) + { + double obj_diff; + double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j]; + if (quad_coef > 0) + obj_diff = -(grad_diff*grad_diff)/quad_coef; + else + obj_diff = -(grad_diff*grad_diff)/TAU; + + if (obj_diff <= obj_diff_min) + { + Gmin_idx=j; + obj_diff_min = obj_diff; + } + } + } + } + } + + if(Gmax+Gmax2 < eps) + return 1; + + out_i = Gmax_idx; + out_j = Gmin_idx; + return 0; +} + +bool Solver::be_shrunk(int i, double Gmax1, double Gmax2) +{ + if(is_upper_bound(i)) + { + if(y[i]==+1) + return(-G[i] > Gmax1); + else + return(-G[i] > Gmax2); + } + else if(is_lower_bound(i)) + { + if(y[i]==+1) + return(G[i] > Gmax2); + else + return(G[i] > Gmax1); + } + else + return(false); +} + +void Solver::do_shrinking(Logger &log) +{ + int i; + double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) } + double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) } + + // find maximal violating pair first + for(i=0;i= Gmax1) + Gmax1 = -G[i]; + } + if(!is_lower_bound(i)) + { + if(G[i] >= Gmax2) + Gmax2 = G[i]; + } + } + else + { + if(!is_upper_bound(i)) + { + if(-G[i] >= Gmax2) + Gmax2 = -G[i]; + } + if(!is_lower_bound(i)) + { + if(G[i] >= Gmax1) + Gmax1 = G[i]; + } + } + } + + if(unshrink == false && Gmax1 + Gmax2 <= eps*10) + { + unshrink = true; + reconstruct_gradient(log); + active_size = l; + } + + for(i=0;i i) + { + if (!be_shrunk(active_size, Gmax1, Gmax2)) + { + swap_index(i,active_size); + break; + } + active_size--; + } + } +} + +double Solver::calculate_rho() +{ + double r; + int nr_free = 0; + double ub = INF, lb = -INF, 
sum_free = 0; + for(int i=0;i0) + r = sum_free/nr_free; + else + r = (ub+lb)/2; + + return r; +} + +// +// Solver for nu-svm classification and regression +// +// additional constraint: e^T \alpha = constant +// +class Solver_NU: public Solver +{ +public: + Solver_NU() {} + void Solve(int l, const QMatrix& Q, const double *p, const schar *y, + double *alpha, double Cp, double Cn, double eps, + SolutionInfo* si, int shrinking, Logger &log, int max_iter) + { + this->si = si; + Solver::Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking,log, max_iter); + } +private: + SolutionInfo *si; + int select_working_set(int &i, int &j); + double calculate_rho(); + bool be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4); + void do_shrinking(Logger &log); +}; + +// return 1 if already optimal, return 0 otherwise +int Solver_NU::select_working_set(int &out_i, int &out_j) +{ + // return i,j such that y_i = y_j and + // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) + // j: minimizes the decrease of obj value + // (if quadratic coefficeint <= 0, replace it with tau) + // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) + + double Gmaxp = -INF; + double Gmaxp2 = -INF; + int Gmaxp_idx = -1; + + double Gmaxn = -INF; + double Gmaxn2 = -INF; + int Gmaxn_idx = -1; + + int Gmin_idx = -1; + double obj_diff_min = INF; + + for(int t=0;t= Gmaxp) + { + Gmaxp = -G[t]; + Gmaxp_idx = t; + } + } + else + { + if(!is_lower_bound(t)) + if(G[t] >= Gmaxn) + { + Gmaxn = G[t]; + Gmaxn_idx = t; + } + } + + int ip = Gmaxp_idx; + int in = Gmaxn_idx; + const Qfloat *Q_ip = NULL; + const Qfloat *Q_in = NULL; + if(ip != -1) // NULL Q_ip not accessed: Gmaxp=-INF if ip=-1 + Q_ip = Q->get_Q(ip,active_size); + if(in != -1) + Q_in = Q->get_Q(in,active_size); + + for(int j=0;j= Gmaxp2) + Gmaxp2 = G[j]; + if (grad_diff > 0) + { + double obj_diff; + double quad_coef = QD[ip]+QD[j]-2*Q_ip[j]; + if (quad_coef > 0) + obj_diff = -(grad_diff*grad_diff)/quad_coef; + else + obj_diff = -(grad_diff*grad_diff)/TAU; + + if (obj_diff <= obj_diff_min) + { + Gmin_idx=j; + obj_diff_min = obj_diff; + } + } + } + } + else + { + if (!is_upper_bound(j)) + { + double grad_diff=Gmaxn-G[j]; + if (-G[j] >= Gmaxn2) + Gmaxn2 = -G[j]; + if (grad_diff > 0) + { + double obj_diff; + double quad_coef = QD[in]+QD[j]-2*Q_in[j]; + if (quad_coef > 0) + obj_diff = -(grad_diff*grad_diff)/quad_coef; + else + obj_diff = -(grad_diff*grad_diff)/TAU; + + if (obj_diff <= obj_diff_min) + { + Gmin_idx=j; + obj_diff_min = obj_diff; + } + } + } + } + } + + if(max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps) + return 1; + + if (y[Gmin_idx] == +1) + out_i = Gmaxp_idx; + else + out_i = Gmaxn_idx; + out_j = Gmin_idx; + + return 0; +} + +bool Solver_NU::be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4) +{ + if(is_upper_bound(i)) + { + if(y[i]==+1) + return(-G[i] > Gmax1); + else + return(-G[i] > Gmax4); + } + else if(is_lower_bound(i)) + { + if(y[i]==+1) + return(G[i] > Gmax2); + else + return(G[i] > Gmax3); + } + else + return(false); +} + +void Solver_NU::do_shrinking(Logger &log) +{ + double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) } + double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) } + double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) } + double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) } + + // find maximal violating pair first + int i; + for(i=0;i Gmax1) Gmax1 = -G[i]; + } + else if(-G[i] > Gmax4) Gmax4 = -G[i]; + } + if(!is_lower_bound(i)) + { + 
if(y[i]==+1) + { + if(G[i] > Gmax2) Gmax2 = G[i]; + } + else if(G[i] > Gmax3) Gmax3 = G[i]; + } + } + + if(unshrink == false && max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10) + { + unshrink = true; + reconstruct_gradient(log); + active_size = l; + } + + for(i=0;i i) + { + if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4)) + { + swap_index(i,active_size); + break; + } + active_size--; + } + } +} + +double Solver_NU::calculate_rho() +{ + int nr_free1 = 0,nr_free2 = 0; + double ub1 = INF, ub2 = INF; + double lb1 = -INF, lb2 = -INF; + double sum_free1 = 0, sum_free2 = 0; + + for(int i=0;i 0) + r1 = sum_free1/nr_free1; + else + r1 = (ub1+lb1)/2; + + if(nr_free2 > 0) + r2 = sum_free2/nr_free2; + else + r2 = (ub2+lb2)/2; + + si->r = (r1+r2)/2; + return (r1-r2)/2; +} + +// +// Q matrices for various formulations +// +class SVC_Q: public Kernel +{ +public: + SVC_Q(const svm_problem& prob, const svm_parameter& param, const schar *y_) + :Kernel(prob.l, prob.x, param) + { + clone(y,y_,prob.l); + cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20))); + QD = new double[prob.l]; + for(int i=0;i*kernel_function)(i,i); + } + + Qfloat *get_Q(int i, int len) const + { + Qfloat *data; + int start, j; + if((start = cache->get_data(i,&data,len)) < len) + { + for(j=start;j*kernel_function)(i,j)); + } + return data; + } + + double *get_QD() const + { + return QD; + } + + void swap_index(int i, int j) const + { + cache->swap_index(i,j); + Kernel::swap_index(i,j); + swap(y[i],y[j]); + swap(QD[i],QD[j]); + } + + ~SVC_Q() + { + delete[] y; + delete cache; + delete[] QD; + } +private: + schar *y; + Cache *cache; + double *QD; +}; + +class ONE_CLASS_Q: public Kernel +{ +public: + ONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) + :Kernel(prob.l, prob.x, param) + { + cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20))); + QD = new double[prob.l]; + for(int i=0;i*kernel_function)(i,i); + } + + Qfloat *get_Q(int i, int len) const + { + Qfloat *data; + int start, j; + if((start = cache->get_data(i,&data,len)) < len) + { + for(j=start;j*kernel_function)(i,j); + } + return data; + } + + double *get_QD() const + { + return QD; + } + + void swap_index(int i, int j) const + { + cache->swap_index(i,j); + Kernel::swap_index(i,j); + swap(QD[i],QD[j]); + } + + ~ONE_CLASS_Q() + { + delete cache; + delete[] QD; + } +private: + Cache *cache; + double *QD; +}; + +class SVR_Q: public Kernel +{ +public: + SVR_Q(const svm_problem& prob, const svm_parameter& param) + :Kernel(prob.l, prob.x, param) + { + l = prob.l; + cache = new Cache(l,(long int)(param.cache_size*(1<<20))); + QD = new double[2*l]; + sign = new schar[2*l]; + index = new int[2*l]; + for(int k=0;k*kernel_function)(k,k); + QD[k+l] = QD[k]; + } + buffer[0] = new Qfloat[2*l]; + buffer[1] = new Qfloat[2*l]; + next_buffer = 0; + } + + void swap_index(int i, int j) const + { + swap(sign[i],sign[j]); + swap(index[i],index[j]); + swap(QD[i],QD[j]); + } + + Qfloat *get_Q(int i, int len) const + { + Qfloat *data; + int j, real_i = index[i]; + if(cache->get_data(real_i,&data,l) < l) + { + for(j=0;j*kernel_function)(real_i,j); + } + + // reorder and copy + Qfloat *buf = buffer[next_buffer]; + next_buffer = 1 - next_buffer; + schar si = sign[i]; + for(j=0;jl; + double *minus_ones = new double[l]; + schar *y = new schar[l]; + + int i; + + for(i=0;iy[i] > 0) y[i] = +1; else y[i] = -1; + } + + Solver s; + s.Solve(l, SVC_Q(*prob,*param,y), minus_ones, y, + alpha, Cp, Cn, param->eps, si, param->shrinking, log, param->max_iter); + + double sum_alpha=0; + 
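// Aside: the Q classes above serve kernel columns through an LRU cache.
// get_Q(i, len) returns column i of Q, computing only the entries
// [start, len) that are not cached yet. A rough usage sketch (illustrative
// names, not code from this file):
//
//   const Qfloat *Q_i = Q.get_Q(i, active_size);   // column i, cached
//   for (int k = 0; k < active_size; ++k)
//       G[k] += delta_alpha_i * Q_i[k];            // rank-one gradient update
//
// SVR_Q additionally doubles the problem: entries k and k+l both map to
// example k (via index[]) with opposite signs (via sign[]), so one cached
// kernel column serves both halves after the sign/reorder pass in its get_Q().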
for(i=0;i=max_iter) + LOG(logger, LogLevel::INFO_LEVEL, "Reaching maximal iterations in two-class probability estimates"); + free(t); +} + +static double sigmoid_predict(double decision_value, double A, double B) +{ + double fApB = decision_value*A+B; + // 1-p used later; avoid catastrophic cancellation + if (fApB >= 0) + return exp(-fApB)/(1.0+exp(-fApB)); + else + return 1.0/(1+exp(fApB)) ; +} + +// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng +static void multiclass_probability(int k, double **r, double *p, Logger &log) +{ + int t,j; + int iter = 0, max_iter=max(100,k); + double **Q=Malloc(double *,k); + double *Qp=Malloc(double,k); + double pQp, eps=0.005/k; + + for (t=0;tmax_error) + max_error=error; + } + if (max_error=max_iter) + LOG(log, LogLevel::INFO_LEVEL, "Exceeds max_iter in multiclass_prob"); + for(t=0;tl); + double *dec_values = Malloc(double,prob->l); + + // random shuffle + for(i=0;il;i++) perm[i]=i; + for(i=0;il;i++) + { + int j = i+ed_c_rand()%(prob->l-i); + swap(perm[i],perm[j]); + } + for(i=0;il/nr_fold; + int end = (i+1)*prob->l/nr_fold; + int j,k; + struct svm_problem subprob; + + subprob.l = prob->l-(end-begin); + subprob.x = Malloc(struct svm_node*,subprob.l); + subprob.y = Malloc(double,subprob.l); + + k=0; + for(j=0;jx[perm[j]]; + subprob.y[k] = prob->y[perm[j]]; + ++k; + } + for(j=end;jl;j++) + { + subprob.x[k] = prob->x[perm[j]]; + subprob.y[k] = prob->y[perm[j]]; + ++k; + } + int p_count=0,n_count=0; + for(j=0;j0) + p_count++; + else + n_count++; + + if(p_count==0 && n_count==0) + for(j=begin;j 0 && n_count == 0) + for(j=begin;j 0) + for(j=begin;jx[perm[j]],&(dec_values[perm[j]]), log); + // ensure +1 -1 order; reason not using CV subroutine + dec_values[perm[j]] *= submodel->label[0]; + } + svm_free_and_destroy_model(&submodel, log); + svm_destroy_param(&subparam, log); + } + free(subprob.x); + free(subprob.y); + } + sigmoid_train(prob->l,dec_values,prob->y,probA,probB, log); + free(dec_values); + free(perm); +} + +// Return parameter of a Laplace distribution +static double svm_svr_probability( + const svm_problem *prob, const svm_parameter *param, Logger &log) +{ + int i; + int nr_fold = 5; + double *ymv = Malloc(double,prob->l); + double mae = 0; + + svm_parameter newparam = *param; + newparam.probability = 0; + svm_cross_validation(prob,&newparam,nr_fold,ymv, log); + for(i=0;il;i++) + { + ymv[i]=prob->y[i]-ymv[i]; + mae += fabs(ymv[i]); + } + mae /= prob->l; + double std=sqrt(2*mae*mae); + int count=0; + mae=0; + for(i=0;il;i++) + if (fabs(ymv[i]) > 5*std) + count=count+1; + else + mae+=fabs(ymv[i]); + mae /= (prob->l-count); + LOG(log, LogLevel::DEBUG_LEVEL, + "Prob. 
model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma= " + to_string(mae)); + free(ymv); + return mae; +} + + +// label: label name, start: begin of each class, count: #data of classes, perm: indices to the original data +// perm, length l, must be allocated before calling this subroutine +static void svm_group_classes(const svm_problem *prob, int *nr_class_ret, int **label_ret, int **start_ret, int **count_ret, int *perm) +{ + int l = prob->l; + int max_nr_class = 16; + int nr_class = 0; + int *label = Malloc(int,max_nr_class); + int *count = Malloc(int,max_nr_class); + int *data_label = Malloc(int,l); + int i; + + for(i=0;iy[i]; + int j; + for(j=0;jparam = *param; + model->free_sv = 0; // XXX + //gmum.r + int iter; + + if(param->svm_type == ONE_CLASS || + param->svm_type == EPSILON_SVR || + param->svm_type == NU_SVR) + { + // regression or one-class-svm + model->nr_class = 2; + model->label = NULL; + model->nSV = NULL; + model->probA = NULL; model->probB = NULL; + model->sv_coef = Malloc(double *,1); + + if(param->probability && + (param->svm_type == EPSILON_SVR || + param->svm_type == NU_SVR)) + { + model->probA = Malloc(double,1); + model->probA[0] = svm_svr_probability(prob,param, log); + } + + + decision_function f = svm_train_one(prob,param,0,0, log, iter); + model->rho = Malloc(double,1); + model->rho[0] = f.rho; + + int nSV = 0; + int i; + for(i=0;il;i++) + if(fabs(f.alpha[i]) > 0) ++nSV; + model->l = nSV; + model->SV = Malloc(svm_node *,nSV); + model->sv_coef[0] = Malloc(double,nSV); + model->sv_indices = Malloc(int,nSV); + int j = 0; + for(i=0;il;i++) + if(fabs(f.alpha[i]) > 0) + { + model->SV[j] = prob->x[i]; + model->sv_coef[0][j] = f.alpha[i]; + model->sv_indices[j] = i+1; + ++j; + } + + free(f.alpha); + } + else + { + // classification + int l = prob->l; + int nr_class; + int *label = NULL; + int *start = NULL; + int *count = NULL; + int *perm = Malloc(int,l); + + // group training data of the same class + svm_group_classes(prob,&nr_class,&label,&start,&count,perm); + if(nr_class == 1) + LOG(log, LogLevel::WARNING_LEVEL, "WARNING_LEVEL: training data in only one class. 
See README for details."); + svm_node **x = Malloc(svm_node *,l); + int i; + for(i=0;ix[perm[i]]; + + // calculate weighted C + + double *weighted_C = Malloc(double, nr_class); + for(i=0;iC; + for(i=0;inr_weight;i++) + { + int j; + for(j=0;jweight_label[i] == label[j]) + break; + if(j == nr_class) { + LOG(log, LogLevel::WARNING_LEVEL, "WARNING_LEVEL: class label " + to_string(param->weight_label[i]) + " specified in weight is not found") + } else { + weighted_C[j] *= param->weight[i]; + } + } + + // train k*(k-1)/2 models + + bool *nonzero = Malloc(bool,l); + for(i=0;iprobability) + { + probA=Malloc(double,nr_class*(nr_class-1)/2); + probB=Malloc(double,nr_class*(nr_class-1)/2); + } + + int p = 0; + for(i=0;iprobability) + svm_binary_svc_probability(&sub_prob,param,weighted_C[i],weighted_C[j],probA[p],probB[p],log); + + f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j], log, iter); + for(k=0;k 0) + nonzero[si+k] = true; + for(k=0;k 0) + nonzero[sj+k] = true; + free(sub_prob.x); + free(sub_prob.y); + ++p; + } + + // build output + //gmum.r + model->iter = iter; + model->nr_class = nr_class; + + model->label = Malloc(int,nr_class); + for(i=0;ilabel[i] = label[i]; + + model->rho = Malloc(double,nr_class*(nr_class-1)/2); + for(i=0;irho[i] = f[i].rho; + + if(param->probability) + { + model->probA = Malloc(double,nr_class*(nr_class-1)/2); + model->probB = Malloc(double,nr_class*(nr_class-1)/2); + for(i=0;iprobA[i] = probA[i]; + model->probB[i] = probB[i]; + } + } + else + { + model->probA=NULL; + model->probB=NULL; + } + + int total_sv = 0; + int *nz_count = Malloc(int,nr_class); + model->nSV = Malloc(int,nr_class); + for(i=0;inSV[i] = nSV; + nz_count[i] = nSV; + } + + LOG(log, LogLevel::DEBUG_LEVEL, "Total nSV = " + to_string(total_sv)); + + model->l = total_sv; + model->SV = Malloc(svm_node *,total_sv); + model->sv_indices = Malloc(int,total_sv); + p = 0; + for(i=0;iSV[p] = x[i]; + model->sv_indices[p++] = perm[i] + 1; + } + + int *nz_start = Malloc(int,nr_class); + nz_start[0] = 0; + for(i=1;isv_coef = Malloc(double *,nr_class-1); + for(i=0;isv_coef[i] = Malloc(double,total_sv); + + p = 0; + for(i=0;isv_coef[j-1][q++] = f[p].alpha[k]; + q = nz_start[j]; + for(k=0;ksv_coef[i][q++] = f[p].alpha[ci+k]; + ++p; + } + + LOG(log, LogLevel::TRACE_LEVEL, "Written SV"); + + free(label); + free(probA); + free(probB); + free(count); + free(perm); + free(start); + free(x); + free(weighted_C); + free(nonzero); + for(i=0;il; + int *perm = Malloc(int,l); + int nr_class; + if (nr_fold > l) + { + nr_fold = l; + C_FPRINTF(stderr,"WARNING: # folds > # data. 
Will use # folds = # data instead (i.e., leave-one-out cross validation)\n"); + } + fold_start = Malloc(int,nr_fold+1); + // stratified cv may not give leave-one-out rate + // Each class to l folds -> some folds may have zero elements + if((param->svm_type == C_SVC || + param->svm_type == NU_SVC) && nr_fold < l) + { + int *start = NULL; + int *label = NULL; + int *count = NULL; + svm_group_classes(prob,&nr_class,&label,&start,&count,perm); + + // random shuffle and then data grouped by fold using the array perm + int *fold_count = Malloc(int,nr_fold); + int c; + int *index = Malloc(int,l); + for(i=0;ix[perm[j]]; + subprob.y[k] = prob->y[perm[j]]; + ++k; + } + for(j=end;jx[perm[j]]; + subprob.y[k] = prob->y[perm[j]]; + ++k; + } + struct svm_model *submodel = svm_train(&subprob,param,log); + if(param->probability && + (param->svm_type == C_SVC || param->svm_type == NU_SVC)) + { + double *prob_estimates=Malloc(double,svm_get_nr_class(submodel, log)); + for(j=begin;jx[perm[j]],prob_estimates, log); + free(prob_estimates); + } + else + for(j=begin;jx[perm[j]], log); + svm_free_and_destroy_model(&submodel, log); + free(subprob.x); + free(subprob.y); + } + free(fold_start); + free(perm); +} + + +int svm_get_svm_type(const svm_model *model, Logger &log) +{ + return model->param.svm_type; +} + +int svm_get_nr_class(const svm_model *model, Logger &log) +{ + return model->nr_class; +} + +void svm_get_labels(const svm_model *model, int* label, Logger &log) +{ + if (model->label != NULL) + for(int i=0;inr_class;i++) + label[i] = model->label[i]; +} + +void svm_get_sv_indices(const svm_model *model, int* indices, Logger &log) +{ + if (model->sv_indices != NULL) + for(int i=0;il;i++) + indices[i] = model->sv_indices[i]; +} + +int svm_get_nr_sv(const svm_model *model, Logger &log) +{ + return model->l; +} + +double svm_get_svr_probability(const svm_model *model, Logger &log) +{ + if ((model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) && + model->probA!=NULL) + return model->probA[0]; + else + { + C_FPRINTF(stderr,"Model doesn't contain information for SVR probability inference\n"); + return 0; + } +} + +double svm_predict_values(const svm_model *model, const svm_node *x, double* dec_values, Logger &log) +{ + int i; + if(model->param.svm_type == ONE_CLASS || + model->param.svm_type == EPSILON_SVR || + model->param.svm_type == NU_SVR) + { + double *sv_coef = model->sv_coef[0]; + double sum = 0; + for(i=0;il;i++) + sum += sv_coef[i] * Kernel::k_function(x,model->SV[i],model->param); + sum -= model->rho[0]; + *dec_values = sum; + + if(model->param.svm_type == ONE_CLASS) + return (sum>0)?1:-1; + else + return sum; + } + else + { + int nr_class = model->nr_class; + int l = model->l; + + double *kvalue = Malloc(double,l); + for(i=0;iSV[i],model->param); + + int *start = Malloc(int,nr_class); + start[0] = 0; + for(i=1;inSV[i-1]; + + int *vote = Malloc(int,nr_class); + for(i=0;inSV[i]; + int cj = model->nSV[j]; + + int k; + double *coef1 = model->sv_coef[j-1]; + double *coef2 = model->sv_coef[i]; + for(k=0;krho[p]; + dec_values[p] = sum; + + if(dec_values[p] > 0) + ++vote[i]; + else + ++vote[j]; + p++; + } + + int vote_max_idx = 0; + for(i=1;i vote[vote_max_idx]) + vote_max_idx = i; + + free(kvalue); + free(start); + free(vote); + return model->label[vote_max_idx]; + } +} + +double svm_predict(const svm_model *model, const svm_node *x, Logger &log) +{ + int nr_class = model->nr_class; + double *dec_values; + if(model->param.svm_type == ONE_CLASS || + model->param.svm_type == EPSILON_SVR || + 
model->param.svm_type == NU_SVR) + dec_values = Malloc(double, 1); + else + dec_values = Malloc(double, nr_class*(nr_class-1)/2); + double pred_result = svm_predict_values(model, x, dec_values, log); + free(dec_values); + return pred_result; +} + +double svm_predict_probability( + const svm_model *model, const svm_node *x, double *prob_estimates, Logger &log) +{ + if ((model->param.svm_type == C_SVC || model->param.svm_type == NU_SVC) && + model->probA!=NULL && model->probB!=NULL) + { + int i; + int nr_class = model->nr_class; + double *dec_values = Malloc(double, nr_class*(nr_class-1)/2); + svm_predict_values(model, x, dec_values, log); + + double min_prob=1e-7; + double **pairwise_prob=Malloc(double *,nr_class); + for(i=0;iprobA[k],model->probB[k]),min_prob),1-min_prob); + pairwise_prob[j][i]=1-pairwise_prob[i][j]; + k++; + } + multiclass_probability(nr_class,pairwise_prob,prob_estimates, log); + + int prob_max_idx = 0; + for(i=1;i prob_estimates[prob_max_idx]) + prob_max_idx = i; + for(i=0;ilabel[prob_max_idx]; + } + else + return svm_predict(model, x, log); +} + +static const char *svm_type_table[] = +{ + "c_svc","nu_svc","one_class","epsilon_svr","nu_svr",NULL +}; + +static const char *kernel_type_table[]= +{ + "linear","polynomial","rbf","sigmoid","precomputed",NULL +}; + +int svm_save_model(const char *model_file_name, const svm_model *model, Logger &log) +{ + FILE *fp = fopen(model_file_name,"w"); + if(fp==NULL) return -1; + + char *old_locale = strdup(setlocale(LC_ALL, NULL)); + setlocale(LC_ALL, "C"); + + const svm_parameter& param = model->param; + + C_FPRINTF(fp,"svm_type %s\n", svm_type_table[param.svm_type]); + C_FPRINTF(fp,"kernel_type %s\n", kernel_type_table[param.kernel_type]); + + if(param.kernel_type == POLY) + C_FPRINTF(fp,"degree %d\n", param.degree); + + if(param.kernel_type == POLY || param.kernel_type == RBF || param.kernel_type == SIGMOID) + C_FPRINTF(fp,"gamma %g\n", param.gamma); + + if(param.kernel_type == POLY || param.kernel_type == SIGMOID) + C_FPRINTF(fp,"coef0 %g\n", param.coef0); + + int nr_class = model->nr_class; + int l = model->l; + C_FPRINTF(fp, "nr_class %d\n", nr_class); + C_FPRINTF(fp, "total_sv %d\n",l); + + { + C_FPRINTF(fp, "rho"); + for(int i=0;irho[i]); + C_FPRINTF(fp, "\n"); + } + + if(model->label) + { + C_FPRINTF(fp, "label"); + for(int i=0;ilabel[i]); + C_FPRINTF(fp, "\n"); + } + + if(model->probA) // regression has probA only + { + C_FPRINTF(fp, "probA"); + for(int i=0;iprobA[i]); + C_FPRINTF(fp, "\n"); + } + if(model->probB) + { + C_FPRINTF(fp, "probB"); + for(int i=0;iprobB[i]); + C_FPRINTF(fp, "\n"); + } + + if(model->nSV) + { + C_FPRINTF(fp, "nr_sv"); + for(int i=0;inSV[i]); + C_FPRINTF(fp, "\n"); + } + + C_FPRINTF(fp, "SV\n"); + const double * const *sv_coef = model->sv_coef; + const svm_node * const *SV = model->SV; + + for(int i=0;ivalue)); + } else + while(p->index != -1) + { + C_FPRINTF(fp,"%d:%.8g ",p->index,p->value); + p++; + } + C_FPRINTF(fp, "\n"); + } + + setlocale(LC_ALL, old_locale); + free(old_locale); + + if (ferror(fp) != 0 || fclose(fp) != 0) return -1; + else return 0; +} + +static char *line = NULL; +static int max_line_len; + +static char* readline(FILE *input) +{ + int len; + + if(fgets(line,max_line_len,input) == NULL) + return NULL; + + while(strrchr(line,'\n') == NULL) + { + max_line_len *= 2; + line = (char *) realloc(line,max_line_len); + len = (int) strlen(line); + if(fgets(line+len,max_line_len-len,input) == NULL) + break; + } + return line; +} + +// +// FSCANF helps to handle fscanf failures. 
+// Its do-while block avoids the ambiguity when +// if (...) +// FSCANF(); +// is used +// +#define FSCANF(_stream, _format, _var) do{ if (fscanf(_stream, _format, _var) != 1) return false; }while(0) +bool read_model_header(FILE *fp, svm_model* model, Logger &log) +{ + svm_parameter& param = model->param; + char cmd[81]; + while(1) + { + FSCANF(fp,"%80s",cmd); + + if(strcmp(cmd,"svm_type")==0) + { + FSCANF(fp,"%80s",cmd); + int i; + for(i=0;svm_type_table[i];i++) + { + if(strcmp(svm_type_table[i],cmd)==0) + { + param.svm_type=i; + break; + } + } + if(svm_type_table[i] == NULL) + { + C_FPRINTF(stderr,"unknown svm type.\n"); + return false; + } + } + else if(strcmp(cmd,"kernel_type")==0) + { + FSCANF(fp,"%80s",cmd); + int i; + for(i=0;kernel_type_table[i];i++) + { + if(strcmp(kernel_type_table[i],cmd)==0) + { + param.kernel_type=i; + break; + } + } + if(kernel_type_table[i] == NULL) + { + C_FPRINTF(stderr,"unknown kernel function.\n"); + return false; + } + } + else if(strcmp(cmd,"degree")==0) + FSCANF(fp,"%d",¶m.degree); + else if(strcmp(cmd,"gamma")==0) + FSCANF(fp,"%lf",¶m.gamma); + else if(strcmp(cmd,"coef0")==0) + FSCANF(fp,"%lf",¶m.coef0); + else if(strcmp(cmd,"nr_class")==0) + FSCANF(fp,"%d",&model->nr_class); + else if(strcmp(cmd,"total_sv")==0) + FSCANF(fp,"%d",&model->l); + else if(strcmp(cmd,"rho")==0) + { + int n = model->nr_class * (model->nr_class-1)/2; + model->rho = Malloc(double,n); + for(int i=0;irho[i]); + } + else if(strcmp(cmd,"label")==0) + { + int n = model->nr_class; + model->label = Malloc(int,n); + for(int i=0;ilabel[i]); + } + else if(strcmp(cmd,"probA")==0) + { + int n = model->nr_class * (model->nr_class-1)/2; + model->probA = Malloc(double,n); + for(int i=0;iprobA[i]); + } + else if(strcmp(cmd,"probB")==0) + { + int n = model->nr_class * (model->nr_class-1)/2; + model->probB = Malloc(double,n); + for(int i=0;iprobB[i]); + } + else if(strcmp(cmd,"nr_sv")==0) + { + int n = model->nr_class; + model->nSV = Malloc(int,n); + for(int i=0;inSV[i]); + } + else if(strcmp(cmd,"SV")==0) + { + while(1) + { + int c = getc(fp); + if(c==EOF || c=='\n') break; + } + break; + } + else + { + C_FPRINTF(stderr,"unknown text in model file: [%s]\n",cmd); + return false; + } + } + + return true; + +} + +svm_model *svm_load_model(const char *model_file_name, Logger &log) +{ + FILE *fp = fopen(model_file_name,"rb"); + if(fp==NULL) return NULL; + + char *old_locale = strdup(setlocale(LC_ALL, NULL)); + setlocale(LC_ALL, "C"); + + // read parameters + + svm_model *model = Malloc(svm_model,1); + model->rho = NULL; + model->probA = NULL; + model->probB = NULL; + model->sv_indices = NULL; + model->label = NULL; + model->nSV = NULL; + + // read header + if (!read_model_header(fp, model, log)) + { + C_FPRINTF(stderr, "ERROR: fscanf failed to read model\n"); + setlocale(LC_ALL, old_locale); + free(old_locale); + free(model->rho); + free(model->label); + free(model->nSV); + free(model); + return NULL; + } + + // read sv_coef and SV + + int elements = 0; + long pos = ftell(fp); + + max_line_len = 1024; + line = Malloc(char,max_line_len); + char *p,*endptr,*idx,*val; + + while(readline(fp)!=NULL) + { + p = strtok(line,":"); + while(1) + { + p = strtok(NULL,":"); + if(p == NULL) + break; + ++elements; + } + } + elements += model->l; + + fseek(fp,pos,SEEK_SET); + + int m = model->nr_class - 1; + int l = model->l; + model->sv_coef = Malloc(double *,m); + int i; + for(i=0;isv_coef[i] = Malloc(double,l); + model->SV = Malloc(svm_node*,l); + svm_node *x_space = NULL; + if(l>0) x_space = 
Malloc(svm_node,elements); + + int j=0; + for(i=0;iSV[i] = &x_space[j]; + + p = strtok(line, " \t"); + model->sv_coef[0][i] = strtod(p,&endptr); + for(int k=1;ksv_coef[k][i] = strtod(p,&endptr); + } + + while(1) + { + idx = strtok(NULL, ":"); + val = strtok(NULL, " \t"); + + if(val == NULL) + break; + x_space[j].index = (int) strtol(idx,&endptr,10); + x_space[j].value = strtod(val,&endptr); + + ++j; + } + x_space[j++].index = -1; + } + free(line); + + setlocale(LC_ALL, old_locale); + free(old_locale); + + if (ferror(fp) != 0 || fclose(fp) != 0) + return NULL; + + model->free_sv = 1; // XXX + return model; +} + +void svm_free_model_content(svm_model* model_ptr, Logger &log) +{ + if(model_ptr->free_sv && model_ptr->l > 0 && model_ptr->SV != NULL) + free((void *)(model_ptr->SV[0])); + if(model_ptr->sv_coef) + { + for(int i=0;inr_class-1;i++) + free(model_ptr->sv_coef[i]); + } + + free(model_ptr->SV); + model_ptr->SV = NULL; + + free(model_ptr->sv_coef); + model_ptr->sv_coef = NULL; + + free(model_ptr->rho); + model_ptr->rho = NULL; + + free(model_ptr->label); + model_ptr->label= NULL; + + free(model_ptr->probA); + model_ptr->probA = NULL; + + free(model_ptr->probB); + model_ptr->probB= NULL; + + free(model_ptr->sv_indices); + model_ptr->sv_indices = NULL; + + free(model_ptr->nSV); + model_ptr->nSV = NULL; +} + +void svm_free_and_destroy_model(svm_model** model_ptr_ptr, Logger &log) +{ + if(model_ptr_ptr != NULL && *model_ptr_ptr != NULL) + { + svm_free_model_content(*model_ptr_ptr, log); + free(*model_ptr_ptr); + *model_ptr_ptr = NULL; + } +} + +void svm_destroy_param(svm_parameter* param, Logger &log) +{ + free(param->weight_label); + free(param->weight); +} + +const char *svm_check_parameter(const svm_problem *prob, const svm_parameter *param, Logger &log) +{ + // svm_type + + int svm_type = param->svm_type; + if(svm_type != C_SVC && + svm_type != NU_SVC && + svm_type != ONE_CLASS && + svm_type != EPSILON_SVR && + svm_type != NU_SVR) + return "unknown svm type"; + + // kernel_type, degree + + int kernel_type = param->kernel_type; + if(kernel_type != LINEAR && + kernel_type != POLY && + kernel_type != RBF && + kernel_type != SIGMOID && + kernel_type != PRECOMPUTED) + return "unknown kernel type"; + + if(param->gamma < 0) + return "gamma < 0"; + + if(param->degree < 0) + return "degree of polynomial kernel < 0"; + + // cache_size,eps,C,nu,p,shrinking + + if(param->cache_size <= 0) + return "cache_size <= 0"; + + if(param->eps <= 0) + return "eps <= 0"; + + if(svm_type == C_SVC || + svm_type == EPSILON_SVR || + svm_type == NU_SVR) + if(param->C <= 0) + return "C <= 0"; + + if(svm_type == NU_SVC || + svm_type == ONE_CLASS || + svm_type == NU_SVR) + if(param->nu <= 0 || param->nu > 1) + return "nu <= 0 or nu > 1"; + + if(svm_type == EPSILON_SVR) + if(param->p < 0) + return "p < 0"; + + if(param->shrinking != 0 && + param->shrinking != 1) + return "shrinking != 0 and shrinking != 1"; + + if(param->probability != 0 && + param->probability != 1) + return "probability != 0 and probability != 1"; + + if(param->probability == 1 && + svm_type == ONE_CLASS) + return "one-class SVM probability output not supported yet"; + + // gmum.r modificatipn + if(param->max_iter < -1) + return "max_iter < -1"; + + + // check whether nu-svc is feasible + + if(svm_type == NU_SVC) + { + int l = prob->l; + int max_nr_class = 16; + int nr_class = 0; + int *label = Malloc(int,max_nr_class); + int *count = Malloc(int,max_nr_class); + + int i; + for(i=0;iy[i]; + int j; + for(j=0;jnu*(n1+n2)/2 > min(n1,n2)) + { + 
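// The guard above is the nu-SVC feasibility condition: for every pair of
// classes with n1 and n2 examples, nu*(n1+n2)/2 must not exceed min(n1,n2),
// or the dual constraints cannot all be satisfied. A quick worked example:
// with n1 = 90 and n2 = 10, nu = 0.5 gives 0.5*(90+10)/2 = 25 > 10, so
// training is rejected below, while nu = 0.2 gives 0.2*(90+10)/2 = 10 <= 10
// and passes.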
free(label); + free(count); + return "specified nu is infeasible"; + } + } + free(label); + free(count); + } + + return NULL; +} + +int svm_check_probability_model(const svm_model *model, Logger &log) +{ + return ((model->param.svm_type == C_SVC || model->param.svm_type == NU_SVC) && + model->probA!=NULL && model->probB!=NULL) || + ((model->param.svm_type == EPSILON_SVR || model->param.svm_type == NU_SVR) && + model->probA!=NULL); +} diff --git a/src/svm/svm_basic.cpp b/src/svm/svm_basic.cpp new file mode 100644 index 00000000..2037e7a1 --- /dev/null +++ b/src/svm/svm_basic.cpp @@ -0,0 +1,231 @@ +#include "svm_basic.h" +#include "svm.h" +#include "utils/utils.h" + +// SVM Configuration +// Constructors +SVMConfiguration::SVMConfiguration(): label(0), nSV(0), libsvm_class_weights(0) { + this->prediction = false; + SVMConfiguration::setDefaultParams(); +} + +int SVMConfiguration::getDataExamplesNumber() { + if(isSparse()) { + // Sparse data is stored transposed: examples are columns + return this->sparse_data.n_cols; + } else { + return this->data.n_rows; + } +} + +void SVMConfiguration::setClassWeights(arma::vec class_weights){ + this->class_weights = class_weights; + this->use_class_weights = true; + class_weight_length = class_weights.size(); + + // Validate before allocating so an early exit does not leak the buffers + if(this->class_weight_length != 2){ + COUT("SVMLight doesn't support multiclass classification. Please pass exactly two class weights.\n"); + EXIT(1); + } + + if(libsvm_class_weights){ + delete[] libsvm_class_weights; + } + if(libsvm_class_weights_labels){ + delete[] libsvm_class_weights_labels; + } + libsvm_class_weights = new double[class_weight_length]; + libsvm_class_weights_labels = new int[class_weight_length]; + + libsvm_class_weights[0] = class_weights(0); + libsvm_class_weights_labels[0] = -1; + libsvm_class_weights[1] = class_weights(1); + libsvm_class_weights_labels[1] = 1; +} + +int SVMConfiguration::getDataDim() { + if(isSparse()) { + return this->sparse_data.n_rows; + } else { + return this->data.n_cols; + } +} + +size_t SVMConfiguration::getSVCount() { + return this->support_vectors.n_cols; +} + +SVMConfiguration::SVMConfiguration(bool prediction): label(0), nSV(0), libsvm_class_weights(0) { + this->prediction = prediction; +} + +void SVMConfiguration::setSparse(bool sparse) { + this->sparse = sparse; +} + +void SVMConfiguration::setSparseData( + arma::uvec rowptr, + arma::uvec colind, + arma::vec values, + size_t nrow, + size_t ncol, + bool one_indexed +) { + // rowptr and colind may arrive one-indexed (e.g. from R); shift them to + // the zero-indexed form Armadillo expects + if (one_indexed) { + for (size_t i=0; i < rowptr.size(); ++i) { + rowptr[i] -= 1; + } + for (size_t i=0; i < colind.size(); ++i) { + colind[i] -= 1; + } + } + + // Key observation: the CSR representation of A is the CSC representation + // of A^T, so the transposed matrix can be built directly from these arrays + this->sparse_data = arma::sp_mat(colind, rowptr, values, ncol, nrow); +} + +arma::sp_mat &SVMConfiguration::getSparseData() { + return this->sparse_data; +} + +bool SVMConfiguration::isSparse() { + return this->sparse; +} + +void SVMConfiguration::setFilename(std::string filename) { + this->filename = filename; +} + +std::string SVMConfiguration::getFilename() { + return this->filename; +} + +void SVMConfiguration::setModelFilename(std::string filename) { + this->model_filename = filename; +} + +std::string SVMConfiguration::getModelFilename() { + return this->model_filename; +} + +void SVMConfiguration::setData(arma::mat data) { + this->data = data; +} + +arma::mat SVMConfiguration::getData() { + return this->data; +} + +void SVMConfiguration::setOutputFilename(std::string filename) { + this->output_filename = filename; +} +std::string SVMConfiguration::getOutputFilename() { 
return this->output_filename; +} + +bool SVMConfiguration::isPrediction() { + return this->prediction; +} + +void SVMConfiguration::setPrediction(bool prediction) { + this->prediction = prediction; +} + +void SVMConfiguration::setLibrary( std::string library ) { + if ( library == "libsvm" ) { + this->library = LIBSVM; + this->svm_type = C_SVC; + } + else if ( library == "svmlight" ) { + this->library = SVMLIGHT; + this->svm_type = C_SVC; + } +} + +void SVMConfiguration::setKernel( std::string kernel ) { + if ( kernel == "linear" ) { + this->kernel_type = _LINEAR; + } + else if ( kernel == "poly" ) { + this->kernel_type = _POLY; + } + else if ( kernel == "rbf" ) { + this->kernel_type = _RBF; + } + else if ( kernel == "sigmoid" ) { + this->kernel_type = _SIGMOID; + } +} + +void SVMConfiguration::setPreprocess( std::string preprocess ) { + if ( preprocess == "2e" ) { + this->preprocess = TWOE; + } + else if ( preprocess == "none" ) { + this->preprocess = NONE; + } +} + +void SVMConfiguration::set_verbosity(int verbosity){ + this->log.verbosity = verbosity; +} + +double SVMConfiguration::getB() { + return b; +} + +void SVMConfiguration::setSeed(int seed){ + ed_c_srand(seed); + this->seed = seed; +} + +void SVMConfiguration::setB(double b) { + this->b = b; +} + +void SVMConfiguration::setDefaultParams() { + library = LIBSVM; + svm_type = C_SVC; + kernel_type = _LINEAR; + preprocess = NONE; + degree = 3; + gamma = 0; // 0 means: use 1/num_features + coef0 = 0; + cache_size = 100; + C = 1; + eps = 1e-3; + shrinking = 1; + probability = 0; + class_weight_length = 0; + libsvm_class_weights_labels = NULL; + libsvm_class_weights = NULL; + cov_eps_smoothing_start = 2.22e-16; // close to machine epsilon, 2^(-52) + + // TODO: probably not necessary + nu = 0.5; + p = 0.1; + + use_transductive_learning = false; + transductive_posratio = -1; + + // Sparse data + sparse = false; + + // Additional features + use_example_weights = false; + use_class_weights = false; + + // User-defined classification mode labels + // (will be filled during data processing) + neg_target = 0; + pos_target = 0; + + debug_library_predict = false; + + max_iter = 10000000; + iter = -1; +} + diff --git a/src/svm/svm_client.cpp b/src/svm/svm_client.cpp new file mode 100644 index 00000000..803599fb --- /dev/null +++ b/src/svm/svm_client.cpp @@ -0,0 +1,487 @@ +#include + +#include "svm_client.h" +#include "libsvm_runner.h" +#include "svmlight_runner.h" +#include "two_e_svm_pre.h" +#include "two_e_svm_post.h" +#include "svm_utils.h" + + +const std::string __file__ = "svm_client.cpp"; +const std::string __client_class__ = "SVMClient"; +const std::string __debug_prefix__ = __file__ + "." 
+ __client_class__; + + +// Constructor +SVMClient::SVMClient(SVMConfiguration *config): config(*config) {} + +void SVMClient::setLibrary(std::string library){ + config.setLibrary(library); +} +void SVMClient::setKernel(std::string kernel){ + config.setKernel(kernel); +} +void SVMClient::setPreprocess(std::string prep){ + config.setPreprocess(prep); +} + +void SVMClient::setCacheSize(double cache) { + config.cache_size = cache; +} +void SVMClient::setDegree(int degree){ + config.degree = degree; +} +void SVMClient::setGamma(double gamma){ + config.gamma = gamma; +} +void SVMClient::setCoef0(double coef0 ){ + config.coef0 = coef0; +} +void SVMClient::setC(double C){ + config.C = C; +} +void SVMClient::setEps(double eps){ + config.eps = eps; +} +void SVMClient::setShrinking(int sh){ + config.shrinking = sh; +} +void SVMClient::setProbability(int prob){ + config.probability = prob; +} + +void SVMClient::setConfiguration(SVMConfiguration *config) { + SVMConfiguration current_config = *config; + this->config = current_config; +} + +void SVMClient::setNumberClass(int nr_class){ + if(nr_class < 1){ + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string("There needs to be at least 1 class")); + } + else { + config.nr_class = nr_class; + } +} + +void SVMClient::setW(arma::vec new_w){ + if (config.kernel_type != _LINEAR) { + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string("Decision boundary is not available with non-linear kernel")); + } + else if (new_w.n_elem != config.w.n_elem) { + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string("Vectors are of different length")); + } + else + config.w = arma::sp_mat(new_w.n_elem,1); + for (int i = 0; i != new_w.n_elem; ++i) { + if (new_w(i) != 0) config.w(i,0) = new_w(i); + } +} + +void SVMClient::setAlpha(arma::vec new_alpha){ + if (new_alpha.n_elem != config.alpha_y.n_elem){ + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string("Vectors are of different length")); + } + else { + config.alpha_y = new_alpha; + } +} + + +// Getters +arma::mat SVMClient::getX(){ + return config.data; +} +arma::sp_mat SVMClient::getSparseX(){ + return config.sparse_data; +} +arma::vec SVMClient::getY(){ + return config.target; +} +arma::vec SVMClient::getPrediction() { + return SVMClient::config.result; +} +arma::vec SVMClient::getDecisionFunction() { + return SVMClient::config.decision_function; +} +std::string SVMClient::getLibrary(){ + switch(config.library) { + case LIBSVM : return "libsvm"; + case SVMLIGHT : return "svmlight"; + default : return "error"; + } +} +std::string SVMClient::getKernel(){ + switch(config.kernel_type) { + case _LINEAR : return "linear"; + case _POLY : return "poly"; + case _RBF : return "rbf"; + case _SIGMOID : return "sigmoid"; + default : return "error"; + } +} +std::string SVMClient::getPreprocess() { + switch(config.preprocess) { + case TWOE : return "2e"; + case NONE : return "none"; + default : return "error"; + } +} +double SVMClient::getCacheSize(){ + return config.cache_size; +} +int SVMClient::getDegree(){ + return config.degree; +} +double SVMClient::getGamma(){ + return config.gamma; +} +double SVMClient::getCoef0(){ + return config.coef0; +} +double SVMClient::getC(){ + return config.C; +} +double SVMClient::getEps(){ + return config.eps; +} +bool SVMClient::isShrinking(){ + return (bool)config.shrinking; +} +bool SVMClient::isProbability(){ + return (bool)config.probability; +} + +bool SVMClient::isSparse() { + return (bool)config.isSparse(); +} + +arma::vec SVMClient::getExampleWeights(){ + return 
config.example_weights; +} + +arma::vec SVMClient::getClassWeights(){ + return config.class_weights; +} + +bool SVMClient::areExamplesWeighted() { + return (bool)config.use_example_weights; +} + +// Model getters +arma::vec SVMClient::getAlpha() { + return arma::vec(config.alpha_y); +} + +void SVMClient::setBias(double bias) { + config.setB(bias); +} + +double SVMClient::getBias() { + return config.getB(); +} + +arma::vec SVMClient::getW() { + if ( config.kernel_type == _LINEAR ) { + return arma::vec(config.w); + } + else { + LOG(config.log, LogLevel::ERR_LEVEL, "ERROR: " + to_string("w is not available with non-linear kernel")); + return 0; + } +} + +int SVMClient::getNumberSV() { + return config.getSVCount(); +} + +int SVMClient::getNumberClass() { + return config.nr_class; +} + +arma::sp_mat SVMClient::getSV(){ + // FIXME: Workaround for R interface + return config.support_vectors.t(); +} + +int SVMClient::getIterations(){ + return config.iter; +} + +SVMConfiguration &SVMClient::getConfiguration() { + return this->config; +} + +// Runners +void SVMClient::run() { + SVMClient::createFlow(); + for (std::vector<SVMHandler*>::iterator iter = SVMHandlers.begin(); + iter != SVMHandlers.end(); ++iter) { + (*iter)->processRequest(config); + } +} + +void SVMClient::train() { + config.setPrediction(false); + run(); +} + +void SVMClient::predict( arma::mat problem ) { + // FIXME: avoid the temporary copies made for a single prediction; + // see issues #466 and #465. + arma::mat previous_data = std::move(config.data); + config.setData(problem); + predictFromConfig(); + config.setData(std::move(previous_data)); +} + +void SVMClient::predictFromConfig() { + LOG(config.log, LogLevel::DEBUG_LEVEL, __debug_prefix__ + ".predictFromConfig() Started."); + + // The number of documents equals the number of rows in the data matrix + size_t n_docs = config.getDataExamplesNumber(); + config.result = arma::zeros(n_docs); + config.decision_function = arma::zeros(n_docs); + + LOG(config.log, LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".predictFromConfig() Calculating prediction on " + + svm_to_str(n_docs) + " documents..."); + + LOG(config.log, LogLevel::TRACE_LEVEL, + __debug_prefix__ + ".predictFromConfig() Calculating prediction with targets " + + svm_to_str(config.pos_target) + " " + svm_to_str(config.neg_target) + "..."); + + if(config.kernel_type == ::_LINEAR){ + LOG(config.log, LogLevel::TRACE_LEVEL, + __debug_prefix__ + ".predictFromConfig() using linear prediction"); + } + + // Prediction itself; math: + // f(x) = sum_j{alpha_j * y_j * kernel(x_j, x)} + b, where j runs over support vectors + for (int i=0; i < n_docs; ++i) { + double doc_result = 0; + // For the linear kernel the precomputed weight vector w makes this a single dot product + if(config.kernel_type == ::_LINEAR){ + if (isSparse()) { + doc_result = arma::dot( config.getSparseData().col(i), config.w ); + } else { + doc_result = arma::dot( config.data.row(i), config.w.t() ); + } + } else { + for (int j=0; j < config.getSVCount(); ++j) { + doc_result += kernel(i, j) * config.alpha_y(j); + } + } + config.decision_function[i] = doc_result + config.b; +// LOG(config.log, 5, "Decision function "+svm_to_str(doc_result)); + } + + if(n_docs){ + LOG(config.log, LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".predictFromConfig() Converting to labels, first " + "result is " + svm_to_str(config.result[0]) + "..."); + } + // Convert results to user-defined labels + n_docs = config.result.n_rows; + double doc_result = 0; + for (int i=0; i < n_docs; ++i) { + doc_result = config.decision_function[i]; + + // Store 
user-defined label + if (doc_result > 0) { + config.result[i] = config.pos_target; + } else { + config.result[i] = config.neg_target; + } + } + + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".predictFromConfig() Done." + ); +} + +double SVMClient::kernel(size_t i, size_t j) { + double result=0.0; + switch (config.kernel_type) { + case _LINEAR: { + // math: kernel(x, x') = x^T * x' + // libsvm: kernel(v, u') = u'*v + // svmlight: kernel(b, a) = a*b + if (isSparse()) { + result = arma::dot( + config.getSparseData().col(i), + config.support_vectors.col(j) + ); + } else { + result = arma::dot( + config.data.row(i), + config.support_vectors.col(j).t() + ); + } + break; + } + case _POLY: { + // libsvm: kernel(v, u') = (gamma*u'*v + coef0)^degree + // svmlight: kernel(b, a) = (s a*b+c)^d + if (isSparse()) { + result = arma::dot( + config.getSparseData().col(i), + config.support_vectors.col(j) + ); + } else { + result = arma::dot( + config.data.row(i), + config.support_vectors.col(j).t() + ); + } + + return pow(result * config.gamma + config.coef0, config.degree); + } + case _RBF: { + // libsvm: kernel(v, u') = exp(-gamma*|u-v|^2) + // svmlight: kernel(b, a) = exp(-gamma ||a-b||^2) + double neg_gamma = -config.gamma; + double norm = 0; + if (isSparse()) { + norm = arma::norm( + config.getSparseData().col(i) + - config.support_vectors.col(j), + 2 + ); + } else { + norm = arma::norm( + config.data.row(i).t() + - config.support_vectors.col(j), + 2 + ); + } + return exp(neg_gamma * norm * norm); + } + case _SIGMOID: { + // libsvm: kernel(v, u') = tanh(gamma*u'*v + coef0) + // svmlight: kernel(b, a) = tanh(s a*b + c) + double tanh_arg = 0; + if (isSparse()) { + tanh_arg = arma::dot( + config.getSparseData().col(i), + config.support_vectors.col(j) + ); + } else { + tanh_arg = arma::dot( + config.data.row(i), + config.support_vectors.col(j).t() + ); + } + return tanh(tanh_arg * config.gamma + config.coef0); + } + default: { + LOG( + config.log, + LogLevel::FATAL_LEVEL, + __debug_prefix__ + ".kernel() invalid kernel type!" + ); + break; + } + } + return result; +} + +void SVMClient::sparse_predict( + arma::uvec rowptr, + arma::uvec colind, + arma::vec values, + size_t n_rows, + size_t n_cols +) { + //FIXME: wtf!! - why I do I have to make 4 copies just to make a prediction.. seriously + //#466 and #465.. 
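// Note on the sparse format: the CSR arrays received here describe the
// n_rows x n_cols data matrix, and setSparseData() exploits the fact that
// the CSR layout of A is exactly the CSC layout of A^T, which is what
// arma::sp_mat's batch constructor expects. A small worked example
// (values chosen arbitrarily, not taken from this code):
//
//   // A (2x3) = [5 0 1; 0 2 0], CSR: values {5,1,2}, colind {0,2,1},
//   // rowptr {0,2,3}. Reused as CSC, the same arrays build A^T (3x2):
//   arma::sp_mat At(arma::uvec{0,2,1},       // CSC row indices  = CSR colind
//                   arma::uvec{0,2,3},       // CSC column ptrs  = CSR rowptr
//                   arma::vec{5,1,2}, 3, 2); // A^T is n_cols x n_rows
//
// Each example then lives in a column of At, matching the col(i) access
// pattern used by predictFromConfig() and kernel().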
+ arma::sp_mat previous_data = std::move(config.sparse_data); + config.setSparseData( + rowptr, + colind, + values, + n_rows, + n_cols, + true + ); + + if (config.debug_library_predict) { + config.setPrediction(true); + requestPredict(); + } else { + predictFromConfig(); + } + config.sparse_data = std::move(previous_data); +} + +void SVMClient::requestPredict() { + LOG(config.log, LogLevel::DEBUG_LEVEL, __debug_prefix__ + ".requestPredict() Started."); + if ( SVMHandlers.size() > 0 ) { + config.setPrediction(true); + for (std::vector::iterator iter = SVMHandlers.begin(); + iter != SVMHandlers.end(); ++iter) { + (*iter)->processRequest(config); + } + } + LOG(config.log, LogLevel::DEBUG_LEVEL, __debug_prefix__ + ".requestPredict() Done."); +} + +void SVMClient::createFlow() { + LOG(config.log, LogLevel::DEBUG_LEVEL, __debug_prefix__ + ".createFlow() Started."); + SVMType svm_type = config.library; + Preprocess preprocess = config.preprocess; + std::vector handlers; + + switch (svm_type) { + case LIBSVM: { + LibSVMRunner *runner = new LibSVMRunner(); + handlers.push_back(runner); + break; + } + case SVMLIGHT: { + SVMLightRunner *runner = new SVMLightRunner(); + handlers.push_back(runner); + break; + } + default: { + LibSVMRunner *runner = new LibSVMRunner(); + handlers.push_back(runner); + break; + } + } + + switch (preprocess) { + case TWOE: { + TwoeSVMPostprocessor *post_runner = new TwoeSVMPostprocessor(); + TwoeSVMPreprocessor *pre_runner = new TwoeSVMPreprocessor(); + handlers.insert( handlers.begin(), pre_runner ); + handlers.push_back( post_runner ); + break; + } + case NONE: + break; + default: + break; + } + + this->SVMHandlers = handlers; + LOG(config.log, LogLevel::DEBUG_LEVEL, __debug_prefix__ + ".createFlow() Done."); +} + diff --git a/src/svm/svm_utils.cpp b/src/svm/svm_utils.cpp new file mode 100644 index 00000000..63422d8b --- /dev/null +++ b/src/svm/svm_utils.cpp @@ -0,0 +1,33 @@ +#include "svm_utils.h" +arma::sp_mat SvmUtils::SvmNodeToArmaSpMat( + svm_node** svm_nodes, int nr_sv, int dim +) { + arma::uvec colptr(nr_sv+1); //this always has this dim + colptr(0) = 0; //always + + int non_zero = 0; + //get necessary statistics + for (int row = 0; row < nr_sv; row++) { + int j = 0; + for (j = 0; svm_nodes[row][j].index != -1; j++) ; + non_zero += j; + } + + + arma::uvec rowind(non_zero); + arma::vec values(non_zero); + + int current = 0; + for (int row = 0; row < nr_sv; row++) { + svm_node* tmp_row = svm_nodes[row]; + for (int j = 0; tmp_row[j].index != -1; j++) { + rowind[current] = tmp_row[j].index - 1; + values[current] = tmp_row[j].value; + ++current; + } + colptr(row+1) = current; + } + + //Transposed matrix of SV + return arma::sp_mat(rowind, colptr, values, dim, nr_sv); +} diff --git a/src/svm/svm_wrapper_module.cpp b/src/svm/svm_wrapper_module.cpp new file mode 100644 index 00000000..e311e01a --- /dev/null +++ b/src/svm/svm_wrapper_module.cpp @@ -0,0 +1,110 @@ +#ifndef SVM_WRAPPER_MODULE_H +#define SVM_WRAPPER_MODULE_H + +#ifdef RCPP_INTERFACE + +#include "svm_basic.h" +#include "svm_client.h" +using namespace Rcpp; + + +RCPP_EXPOSED_CLASS(SVMConfiguration) +RCPP_EXPOSED_CLASS(SVMClient) + + +RCPP_MODULE(svm_wrapper) { + + class_("SVMConfiguration") + .constructor() + + .field("x", &SVMConfiguration::data) + .field("y", &SVMConfiguration::target) + .field("result", &SVMConfiguration::result) + + .field("C", &SVMConfiguration::C) + .field("gamma", &SVMConfiguration::gamma) + .field("coef0", &SVMConfiguration::coef0) + .field("eps", &SVMConfiguration::eps) + 
.field("degree", &SVMConfiguration::degree) + .field("cache_size", &SVMConfiguration::cache_size) + .field("shrinking", &SVMConfiguration::shrinking) + .field("probability", &SVMConfiguration::probability) + .field("example_weights", &SVMConfiguration::example_weights) + .field( + "use_example_weights", &SVMConfiguration::use_example_weights) + .field( + "use_transductive_learning", + &SVMConfiguration::use_transductive_learning) + .field( + "transductive_posratio", + &SVMConfiguration::transductive_posratio) + .field("max_iter", &SVMConfiguration::max_iter) + .field("sparse", &SVMConfiguration::sparse) + .field("svm_options", &SVMConfiguration::svm_options) + + .method("set_sparse_data", &SVMConfiguration::setSparseData) + .method("setSeed", &SVMConfiguration::setSeed) + .method("setPrediction", &SVMConfiguration::setPrediction) + .method("setLibrary", &SVMConfiguration::setLibrary) + .method("setKernel", &SVMConfiguration::setKernel) + .method("setPreprocess", &SVMConfiguration::setPreprocess) + .method("set_verbosity", &SVMConfiguration::set_verbosity) + .method("setClassWeights", &SVMConfiguration::setClassWeights) + + ; + class_("SVMClient") + .constructor() + + .method(".setLibrary",&SVMClient::setLibrary) + .method(".setKernel", &SVMClient::setKernel) + .method(".setPreprocess",&SVMClient::setPreprocess) + .method(".setCache",&SVMClient::setCacheSize) + .method(".setDegree",&SVMClient::setDegree) + .method(".setGamma",&SVMClient::setGamma) + .method(".setCoef0",&SVMClient::setCoef0) + .method(".setC",&SVMClient::setC) + .method(".setEps",&SVMClient::setEps) + .method(".setShrinking",&SVMClient::setShrinking) + .method(".setProbability",&SVMClient::setProbability) + .method(".setBias", &SVMClient::setBias) + .method(".setW", &SVMClient::setW) + .method(".setAlpha", &SVMClient::setAlpha) + .method(".setNumberClass", &SVMClient::setNumberClass) + + .method(".getX", &SVMClient::getX) + .method(".getSparseX", &SVMClient::getSparseX) + .method(".getY", &SVMClient::getY) + .method(".getPrediction", &SVMClient::getPrediction) + .method(".getDecisionFunction", &SVMClient::getDecisionFunction) + .method(".getCore", &SVMClient::getLibrary) + .method(".getKernel", &SVMClient::getKernel) + .method(".getPreprocess", &SVMClient::getPreprocess) + .method(".getCache", &SVMClient::getCacheSize) + .method(".getDegree", &SVMClient::getDegree) + .method(".getGamma", &SVMClient::getGamma) + .method(".getCeof0", &SVMClient::getCoef0) + .method(".getC", &SVMClient::getC) + .method(".getEps", &SVMClient::getEps) + .method(".isShrinking", &SVMClient::isShrinking) + .method(".isProbability", &SVMClient::isProbability) + .method(".isSparse", &SVMClient::isSparse) + .method(".getExampleWeights", &SVMClient::getExampleWeights) + .method(".areExamplesWeighted", &SVMClient::areExamplesWeighted) + .method(".getClassWeights", &SVMClient::getClassWeights) + .method(".getAlpha", &SVMClient::getAlpha) + .method(".getBias", &SVMClient::getBias) + .method(".getW", &SVMClient::getW) + .method(".getNumberSV", &SVMClient::getNumberSV) + .method(".getNumberClass", &SVMClient::getNumberClass) + .method(".getSV", &SVMClient::getSV) + .method(".getIterations", &SVMClient::getIterations) + .method(".train", &SVMClient::train) + .method(".predict", &SVMClient::predict) + .method(".sparse_predict", &SVMClient::sparse_predict) + ; + +} + +#endif + +#endif diff --git a/src/svm/svmlight_runner.cpp b/src/svm/svmlight_runner.cpp new file mode 100644 index 00000000..1e8fa5b6 --- /dev/null +++ b/src/svm/svmlight_runner.cpp @@ -0,0 
+1,1222 @@ +/** + * @file svmlight_runner.cpp + * @brief SVMLight implementation class of SVMHandler -- definitions + * @author Konrad Talik + * @copyright GPLv3 + */ + + +#include +#include +#include +#include +#include +#include + +#include "svmlight_runner.h" +#include "utils/logger.h" +#include "svm_basic.h" +#include "utils/cutils.h" +#include "utils/utils.h" + +const std::string __file__ = "svmlight_runner.cpp"; +const std::string __runner_class__ = "SVMLightRunner"; +const std::string __debug_prefix__ = __file__ + "." + __runner_class__; + + +extern long smallroundcount; +extern long roundnumber; +extern long precision_violations; +extern double *primal; +extern double *dual; +extern double opt_precision; +extern long maxiter; +extern double lindep_sensitivity; +extern double* buffer; +extern long* nonoptimal; + +#define DEF_PRECISION 1E-5 +#define DEF_MAX_ITERATIONS 200 +#define DEF_LINDEP_SENSITIVITY 1E-8 + +void init_global_params_QP() { + primal=0; + dual=0; + lindep_sensitivity=DEF_LINDEP_SENSITIVITY; + maxiter=DEF_MAX_ITERATIONS; + opt_precision=DEF_PRECISION; + buffer = 0; + nonoptimal = 0; + smallroundcount = 0; + roundnumber = 0; + precision_violations = 0; +} + +SVMLightRunner::SVMLightRunner() { +} + + +SVMLightRunner::~SVMLightRunner() { + _clear(); +} + + +bool SVMLightRunner::canHandle(SVMConfiguration &config) { + /// @TODO SVMLightRunner::canHandle + return true; +} + + +void SVMLightRunner::processRequest( + SVMConfiguration &config +) { + int argc = 0; + char** argv = 0; + + arma::mat unique_labels = arma::unique(config.target); + + if(unique_labels.size() != 2 && !config.use_transductive_learning){ + COUT("Passed a number of labels different from 2 to SVMLight without use_transductive_learning"); + EXIT(1); + } + if((unique_labels.size() < 2 || unique_labels.size() > 3) && config.use_transductive_learning){ + COUT("Passed an incorrect number of labels (<2 or >3) to SVMLight for transductive learning"); + EXIT(1); + } + // arma::unique returns sorted values, so a valid binary label set is exactly {-1, 1} + if(unique_labels.size() == 2){ + if(unique_labels[0] != -1 || unique_labels[1] != 1){ + COUT("Please pass negative class as -1 and positive as 1"); + EXIT(1); + } + } + // For transductive learning the valid label set is {-1, 0, 1}, 0 marking unlabeled examples + if(unique_labels.size() == 3){ + if(unique_labels[0] != -1 || unique_labels[1] != 0 + || unique_labels[2] != 1 + ){ + COUT("Please pass negative class as -1, positive as 1 and unlabeled as 0"); + EXIT(1); + } + } + + config.neg_target = -1; + config.pos_target = 1; + + if (!config.svm_options.empty()) { + argc = check_argc(std::string("gmum ") + config.svm_options); + argv = to_argv(std::string("gmum ") + config.svm_options); + } + + if (!config.isPrediction()) { + // Learning + librarySVMLearnMain(argc, argv, true, config); + + } else { + // Prediction + librarySVMClassifyMain(argc, argv, true, config); + // Convert sign to label + resultsToLabels(config); + } +} + + +void SVMLightRunner::resultsToLabels(SVMConfiguration &config) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".resultsToLabels() Started." 
+ ); + + size_t n_docs = config.result.n_rows; + double doc_result = 0; + for (int i=0; i < n_docs; ++i) { + doc_result = config.result[i]; + + //arma::vec doc_result_vec; + //doc_result_vec << doc_result << arma::endr; + //arma::vec result_sign_vec = arma::sign(doc_result_vec); + //doc_result = result_sign_vec[0]; + + // Store user-defined label + if (doc_result < 0) { + config.result[i] = config.neg_target; + } else if (doc_result > 0) { + config.result[i] = config.pos_target; + } else { + config.result[i] = 0; + } + } + + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".resultsToLabels() Done." + ); +} + + +void SVMLightRunner::_clear() { + extern double *primal; + primal = 0; + extern double *dual; + dual = 0; + extern long precision_violations; + precision_violations = 0; + extern double *buffer; + buffer = 0; + extern long *nonoptimal; + nonoptimal = 0; + extern long smallroundcount; + smallroundcount = 0; + extern long roundnumber; + roundnumber = 0; + extern long kernel_cache_statistic; + kernel_cache_statistic = 0; +} + + +/* Library functionalities wrappers */ + +int SVMLightRunner::librarySVMLearnMain( + int argc, char **argv, bool use_gmumr, SVMConfiguration &config +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".librarySVMLearnMain() Started." + ); + DOC **docs; /* training examples */ + long totwords,totdoc,i; + double *target; + double *alpha_in=NULL; + KERNEL_CACHE *kernel_cache; + LEARN_PARM learn_parm; + KERNEL_PARM kernel_parm; + MODEL *model=(MODEL *)my_malloc(sizeof(MODEL)); + + // GMUM.R changes { + librarySVMLearnReadInputParameters( + argc, argv, docfile, modelfile, restartfile, &verbosity, &learn_parm, + &kernel_parm, use_gmumr, config + ); + + kernel_parm.kernel_type = static_cast(config.kernel_type); + + libraryReadDocuments( + docfile, &docs, &target, &totwords, &totdoc, use_gmumr, config + ); + // GMUM.R changes } + + if(restartfile[0]) alpha_in=read_alphas(restartfile,totdoc); + + if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */ + kernel_cache=NULL; + } + else { + /* Always get a new kernel cache. It is not possible to use the + * same cache for two different training runs */ + kernel_cache=kernel_cache_init(totdoc,learn_parm.kernel_cache_size); + } + + //gmum.r + init_global_params_QP(); + + if(learn_parm.type == CLASSIFICATION) { + svm_learn_classification(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,kernel_cache,model,alpha_in); + } + else if(learn_parm.type == REGRESSION) { + svm_learn_regression(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,&kernel_cache,model); + } + else if(learn_parm.type == RANKING) { + svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,&kernel_cache,model); + } + else if(learn_parm.type == OPTIMIZATION) { + svm_learn_optimization(docs,target,totdoc,totwords,&learn_parm, + &kernel_parm,kernel_cache,model,alpha_in); + } + //gmum.r + config.iter = learn_parm.iterations; + + if(kernel_cache) { + /* Free the memory used for the cache. */ + kernel_cache_cleanup(kernel_cache); + } + + /* Warning: The model contains references to the original data 'docs'. + If you want to free the original data, and only keep the model, you + have to make a deep copy of 'model'. 
*/
+  /* deep_copy_of_model=copy_model(model); */
+  // GMUM.R changes {
+  if (!use_gmumr) {
+      write_model(modelfile,model);
+  } else {
+      SVMLightModelToSVMConfiguration(model, config);
+  }
+  // GMUM.R changes }
+
+  free(alpha_in);
+  free_model(model,0);
+  for(i=0;i<totdoc;i++) free_example(docs[i],1);
+  free(docs);
+  free(target);
+
+  LOG(
+      config.log,
+      LogLevel::DEBUG_LEVEL,
+      __debug_prefix__ + ".librarySVMLearnMain() Done."
+  );
+
+  return(0);
+}
+
+
+void SVMLightRunner::librarySVMLearnReadInputParameters(
+    int argc, char *argv[], char *docfile, char *modelfile,
+    char *restartfile, long *verbosity, LEARN_PARM *learn_parm,
+    KERNEL_PARM *kernel_parm, bool use_gmumr, SVMConfiguration &config
+) {
+    LOG(
+        config.log,
+        LogLevel::DEBUG_LEVEL,
+        __debug_prefix__ + ".librarySVMLearnReadInputParameters() Started."
+    );
+    long i;
+    char type[100];
+
+    /* set default */
+    strcpy (modelfile, "svm_model");
+    strcpy (learn_parm->predfile, "trans_predictions");
+    strcpy (learn_parm->alphafile, "");
+    strcpy (restartfile, "");
+    // SVMLight verbosity = 1 corresponds to GMUM.R verbosity = 5 (DEBUG_LEVEL)
+    (*verbosity)=config.log.verbosity - 4;
+    learn_parm->biased_hyperplane=1;
+    learn_parm->sharedslack=0;
+    learn_parm->remove_inconsistent=0;
+    learn_parm->skip_final_opt_check=0;
+    learn_parm->svm_maxqpsize=10;
+    learn_parm->svm_newvarsinqp=0;
+    learn_parm->svm_iter_to_shrink=-9999;
+    learn_parm->kernel_cache_size=40;
+    /* upper bound C on alphas */
+    learn_parm->svm_c=config.C;
+    learn_parm->eps=0.1;
+    learn_parm->transduction_posratio=-1.0;
+    /* factor to multiply C for positive examples */
+    learn_parm->svm_costratio=1.0;
+
+    /* FIXME: Find what this one does */
+    learn_parm->svm_costratio_unlab=1.0;
+    learn_parm->svm_unlabbound=1E-5;
+    learn_parm->epsilon_crit=0.001;
+    learn_parm->epsilon_a=1E-15;
+    learn_parm->compute_loo=0;
+    learn_parm->rho=1.0;
+    learn_parm->xa_depth=0;
+    kernel_parm->kernel_type=0;
+    kernel_parm->poly_degree=3;
+    kernel_parm->rbf_gamma=1.0;
+    kernel_parm->coef_lin=1;
+    kernel_parm->coef_const=1;
+    strcpy(kernel_parm->custom,"empty");
+    strcpy(type,"c");
+
+    if (config.max_iter == -1)
+        learn_parm->maxiter = 100000;
+    else
+        learn_parm->maxiter = config.max_iter;
+
+    if (static_cast<int>(config.kernel_type) == 3) {
+        // sigmoid tanh(s a*b + c)
+        // s = 1.0/highest_feature_index
+        kernel_parm->coef_lin = 1.0/config.getDataDim();
+        // c
+        kernel_parm->coef_const = -1.0;
+    }
+
+    /* set userdefined */
+    if (config.degree){
+        kernel_parm->poly_degree = config.degree;
+    }
+    if (config.gamma){
+        kernel_parm->rbf_gamma = config.gamma;
+    }
+    if (config.gamma){
+        kernel_parm->coef_lin = config.gamma;
+    }
+    if (config.coef0){
+        kernel_parm->coef_const = config.coef0;
+    }
+
+    learn_parm->transduction_posratio = config.transductive_posratio;
+    //This is tricky - both LIBSVM and SVMLIGHT have same default eps.
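// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] The
// defaults above mirror SVMLight's read_input_parameters(), with the GMUM.R
// config layered on top: degree -> poly_degree, gamma -> rbf_gamma and
// coef_lin, coef0 -> coef_const, and for the sigmoid kernel tanh(s*a'b + c)
// the defaults s = 1/dim, c = -1. A condensed version of that mapping (the
// KernelParams struct here is hypothetical):
#if 0
struct KernelParams {
    long type, poly_degree;
    double rbf_gamma, coef_lin, coef_const;
};

KernelParams map_kernel(int type, double degree, double gamma,
                        double coef0, long dim) {
    KernelParams k = {type, 3, 1.0, 1.0, 1.0};  // SVMLight-style defaults
    if (type == 3) {                            // sigmoid: scale s by dimension
        k.coef_lin = 1.0 / dim;
        k.coef_const = -1.0;
    }
    if (degree) k.poly_degree = (long)degree;   // user-defined overrides,
    if (gamma)  { k.rbf_gamma = gamma; k.coef_lin = gamma; }  // 0 = "unset"
    if (coef0)  k.coef_const = coef0;
    return k;
}
#endif
// --------------------------------------------------------------------------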
+    //However in general we should do things like that
+    learn_parm->epsilon_crit=config.eps;
+
+    for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
+        switch ((argv[i])[1])
+        {
+            case '?': libraryPrintHelp(); EXIT(0);
+            case 'z': i++; strcpy(type,argv[i]); break;
+            case 'v': i++; (*verbosity)=atol(argv[i]); break;
+            case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;
+            case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
+            case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;
+            case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
+            case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
+            case '#': i++; learn_parm->maxiter=atol(argv[i]); break;
+            case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
+            case 'm': i++; learn_parm->kernel_cache_size=atol(argv[i]); break;
+            case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
+            case 'w': i++; learn_parm->eps=atof(argv[i]); break;
+            case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;
+            case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;
+            case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;
+            case 'o': i++; learn_parm->rho=atof(argv[i]); break;
+            case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;
+            case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;
+            case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;
+            case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;
+            case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
+            case 's': i++; kernel_parm->coef_lin=atof(argv[i]); break;
+            case 'r': i++; kernel_parm->coef_const=atof(argv[i]); break;
+            case 'u': i++; strcpy(kernel_parm->custom,argv[i]); break;
+            case 'l': i++; strcpy(learn_parm->predfile,argv[i]); break;
+            case 'a': i++; strcpy(learn_parm->alphafile,argv[i]); break;
+            case 'y': i++; strcpy(restartfile,argv[i]); break;
+            default: C_PRINTF("\n[SVMLight] Unrecognized option %s!\n\n",argv[i]);
+                libraryPrintHelp();
+                EXIT(0);
+        }
+    }
+
+    // GMUM.R changes }
+    if(!use_gmumr) {
+        if(i>=argc) {
+            C_PRINTF("\nNot enough input parameters!\n\n");
+            libraryWaitAnyKey();
+            libraryPrintHelp();
+            EXIT(0);
+        }
+        strcpy (docfile, argv[i]);
+    }
+    // GMUM.R changes }
+    if((i+1)<argc) {
+        strcpy (modelfile, argv[i+1]);
+    }
+    if(learn_parm->svm_iter_to_shrink == -9999) {
+        if(kernel_parm->kernel_type == LINEAR)
+            learn_parm->svm_iter_to_shrink=2;
+        else
+            learn_parm->svm_iter_to_shrink=100;
+    }
+    if(strcmp(type,"c")==0) {
+        learn_parm->type=CLASSIFICATION;
+    }
+    else if(strcmp(type,"r")==0) {
+        learn_parm->type=REGRESSION;
+    }
+    else if(strcmp(type,"p")==0) {
+        learn_parm->type=RANKING;
+    }
+    else if(strcmp(type,"o")==0) {
+        learn_parm->type=OPTIMIZATION;
+    }
+    else if(strcmp(type,"s")==0) {
+        learn_parm->type=OPTIMIZATION;
+        learn_parm->sharedslack=1;
+    }
+    else {
+        C_PRINTF("\nUnknown type '%s': Valid types are 'c' (classification), 'r' regression, and 'p' preference ranking.\n",type);
+        libraryWaitAnyKey();
+        libraryPrintHelp();
+        EXIT(0);
+    }
+    if((learn_parm->skip_final_opt_check)
+        && (kernel_parm->kernel_type == LINEAR)) {
+        C_PRINTF("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
+        learn_parm->skip_final_opt_check=0;
+    }
+    if((learn_parm->skip_final_opt_check)
+        && (learn_parm->remove_inconsistent)) {
+        C_PRINTF("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
+        libraryWaitAnyKey();
+        libraryPrintHelp();
+        EXIT(0);
+    }
+    if((learn_parm->svm_maxqpsize<2)) {
+        C_PRINTF("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
+        libraryWaitAnyKey();
+        libraryPrintHelp();
+        EXIT(0);
+    }
+    if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
+        C_PRINTF("\nMaximum size of QP-subproblems [%ld] must be larger than the number
of\n",learn_parm->svm_maxqpsize); + C_PRINTF("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->svm_iter_to_shrink<1) { + C_PRINTF("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->svm_c<0) { + C_PRINTF("\nThe C parameter must be greater than zero!\n\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->transduction_posratio>1) { + C_PRINTF("\nThe fraction of unlabeled examples to classify as positives must\n"); + C_PRINTF("be less than 1.0 !!!\n\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->svm_costratio<=0) { + C_PRINTF("\nThe COSTRATIO parameter must be greater than zero!\n\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->epsilon_crit<=0) { + C_PRINTF("\nThe epsilon parameter must be greater than zero!\n\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if(learn_parm->rho<0) { + C_PRINTF("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n"); + C_PRINTF("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n"); + C_PRINTF("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } + if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) { + C_PRINTF("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n"); + C_PRINTF("for switching to the conventional xa/estimates described in T. Joachims,\n"); + C_PRINTF("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n"); + libraryWaitAnyKey(); + libraryPrintHelp(); + EXIT(0); + } +} + + + + +int SVMLightRunner::librarySVMClassifyMain( + int argc, char **argv, bool use_gmumr, SVMConfiguration &config +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".librarySVMClassifyMain() Started." 
+ ); + DOC *doc; /* test example */ + WORD *words; + long max_docs,max_words_doc,lld; + long totdoc=0,queryid,slackid; + long correct=0,incorrect=0,no_accuracy=0; + long res_a=0,res_b=0,res_c=0,res_d=0,wnum,pred_format; + long j; + double t1,runtime=0; + double dist,doc_label,costfactor; + char *line,*comment; + FILE *predfl,*docfl; + MODEL *model; + + // GMUM.R changes { + librarySVMClassifyReadInputParameters( + argc, argv, docfile, modelfile, predictionsfile, &verbosity, + &pred_format, use_gmumr, config); + + if (!use_gmumr) { + nol_ll(docfile,&max_docs,&max_words_doc,&lld); /* scan size of input file */ + lld+=2; + + line = (char *)my_malloc(sizeof(char)*lld); + } else { + max_docs = config.target.n_rows; + max_words_doc = config.getDataDim(); + config.result = arma::zeros(max_docs); + // Prevent writing to the file + pred_format = -1; + // lld used only for file reading + } + max_words_doc+=2; + words = (WORD *)my_malloc(sizeof(WORD)*(max_words_doc+10)); + // GMUM.R changes } + + model=libraryReadModel(modelfile, use_gmumr, config); + // GMUM.R changes } + + if(model->kernel_parm.kernel_type == 0) { /* linear kernel */ + /* compute weight vector */ + add_weight_vector_to_linear_model(model); + } + + if(verbosity>=2) { + C_PRINTF("Classifying test examples.."); C_FFLUSH(stdout); + } + + // GMUM.R changes { + bool newline; + if (!use_gmumr) { + if ((predfl = fopen (predictionsfile, "w")) == NULL) + { perror (predictionsfile); EXIT (1); } + if ((docfl = fopen (docfile, "r")) == NULL) + { perror (docfile); EXIT (1); } + + newline = (!feof(docfl)) && fgets(line,(int)lld,docfl); + } else { + newline = false; + if (totdoc < config.getDataExamplesNumber()) { + newline = true; + std::string str = SVMConfigurationToSVMLightLearnInputLine(config, totdoc); + line = new char[str.size() + 1]; + std::copy(str.begin(), str.end(), line); + line[str.size()] = '\0'; + } + } + while(newline) { + if (use_gmumr) { + std::string stringline = ""; + } + // GMUM.R changes } + if(line[0] == '#') continue; /* line contains comments */ + parse_document(line,words,&doc_label,&queryid,&slackid,&costfactor,&wnum, + max_words_doc,&comment); + totdoc++; + if(model->kernel_parm.kernel_type == 0) { /* linear kernel */ + for(j=0;(words[j]).wnum != 0;j++) { /* Check if feature numbers */ + if((words[j]).wnum>model->totwords) /* are not larger than in */ + (words[j]).wnum=0; /* model. Remove feature if */ + } /* necessary. 
*/ + doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0)); + t1=get_runtime(); + dist=classify_example_linear(model,doc); + runtime+=(get_runtime()-t1); + free_example(doc,1); + } + else { /* non-linear kernel */ + doc = create_example(-1,0,0,0.0,create_svector(words,comment,1.0)); + t1=get_runtime(); + dist=classify_example(model,doc); + runtime+=(get_runtime()-t1); + free_example(doc,1); + } + if(dist>0) { + if(pred_format==0) { /* old weired output format */ + C_FPRINTF(predfl,"%.8g:+1 %.8g:-1\n",dist,-dist); + } + if(doc_label>0) correct++; else incorrect++; + if(doc_label>0) res_a++; else res_b++; + } + else { + if(pred_format==0) { /* old weired output format */ + C_FPRINTF(predfl,"%.8g:-1 %.8g:+1\n",-dist,dist); + } + if(doc_label<0) correct++; else incorrect++; + if(doc_label>0) res_c++; else res_d++; + } + if(pred_format==1) { /* output the value of decision function */ + C_FPRINTF(predfl,"%.8g\n",dist); + } + if((int)(0.01+(doc_label*doc_label)) != 1) + { no_accuracy=1; } /* test data is not binary labeled */ + if(verbosity>=2) { + if(totdoc % 100 == 0) { + C_PRINTF("%ld..",totdoc); C_FFLUSH(stdout); + } + } + // GMUM.R changes { + if (!use_gmumr) { + newline = (!feof(docfl)) && fgets(line,(int)lld,docfl); + } else { + newline = false; + // Store prediction result in config + config.result[totdoc-1] = dist; + // Read next line + if (totdoc < config.getDataExamplesNumber()) { + newline = true; + std::string str = SVMConfigurationToSVMLightLearnInputLine(config, totdoc); + line = new char[str.size() + 1]; + std::copy(str.begin(), str.end(), line); + line[str.size()] = '\0'; + } + } + } + if (!use_gmumr) { + fclose(predfl); + fclose(docfl); + free(line); + } + // GMUM.R changes } + free(words); + free_model(model,1); + + if(verbosity>=2) { + C_PRINTF("done\n"); + + /* Note by Gary Boone Date: 29 April 2000 */ + /* o Timing is inaccurate. The timer has 0.01 second resolution. */ + /* Because classification of a single vector takes less than */ + /* 0.01 secs, the timer was underflowing. */ + C_PRINTF("Runtime (without IO) in cpu-seconds: %.2f\n", + (float)(runtime/100.0)); + + } + if((!no_accuracy) && (verbosity>=1)) { + C_PRINTF("Accuracy on test set: %.2f%% (%ld correct, %ld incorrect, %ld total)\n",(float)(correct)*100.0/totdoc,correct,incorrect,totdoc); + C_PRINTF("Precision/recall on test set: %.2f%%/%.2f%%\n",(float)(res_a)*100.0/(res_a+res_b),(float)(res_a)*100.0/(res_a+res_c)); + } + + return(0); +} + + +void SVMLightRunner::librarySVMClassifyReadInputParameters( + int argc, char **argv, char *docfile, char *modelfile, + char *predictionsfile, long int *verbosity, long int *pred_format, + bool use_gmumr, SVMConfiguration &config +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".librarySVMClassifyReadInputParameters() Started." 
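// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] The
// counters printed above form a confusion matrix: res_a = true positives,
// res_b = false positives, res_c = false negatives, res_d = true negatives,
// so accuracy = (a+d)/total, precision = a/(a+b), recall = a/(a+c):
#if 0
struct Metrics { double accuracy, precision, recall; };

Metrics confusion_metrics(long a, long b, long c, long d) {
    Metrics m;
    long total = a + b + c + d;
    m.accuracy  = 100.0 * (a + d) / total;  // percent, as in the printout
    m.precision = 100.0 * a / (a + b);
    m.recall    = 100.0 * a / (a + c);
    return m;
}
#endif
// --------------------------------------------------------------------------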
+    );
+    long i;
+
+    // GMUM.R changes {
+    if (!use_gmumr) {
+        /* set default */
+        strcpy (modelfile, "svm_model");
+        strcpy (predictionsfile, "svm_predictions");
+        (*verbosity)=2;
+    } else {
+        // SVMLight verbosity = 1 corresponds to GMUM.R verbosity = 5 (DEBUG_LEVEL)
+        (*verbosity) = config.log.verbosity - 4;
+    }
+    // GMUM.R changes }
+    (*pred_format)=1;
+
+    for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
+        switch ((argv[i])[1])
+        {
+            case 'h': libraryPrintHelp(); EXIT(0);
+            case 'v': i++; (*verbosity)=atol(argv[i]); break;
+            case 'f': i++; (*pred_format)=atol(argv[i]); break;
+            default: C_PRINTF("\nUnrecognized option %s!\n\n",argv[i]);
+                libraryPrintHelp();
+                EXIT(0);
+        }
+    }
+    if (!use_gmumr) {
+        if((i+1)>=argc) {
+            C_PRINTF("\nNot enough input parameters!\n\n");
+            libraryPrintHelp();
+            EXIT(0);
+        }
+        strcpy (docfile, argv[i]);
+        strcpy (modelfile, argv[i+1]);
+        if((i+2)<argc) {
+            strcpy (predictionsfile, argv[i+2]);
+        }
+    }
+}
+
+
+MODEL * SVMLightRunner::libraryReadModel(
+    char *modelfile, bool use_gmumr, SVMConfiguration &config
+) {
+    LOG(
+        config.log,
+        LogLevel::DEBUG_LEVEL,
+        __debug_prefix__ + ".libraryReadModel() Started."
+    );
+    FILE *modelfl;
+    long i,queryid,slackid;
+    double costfactor;
+    long max_sv,max_words,ll,wpos;
+    char *line,*comment;
+    WORD *words;
+    char version_buffer[100];
+    MODEL *model;
+
+    if(verbosity>=1) {
+        C_PRINTF("Reading model..."); C_FFLUSH(stdout);
+    }
+
+    // GMUM.R changes {
+    model = (MODEL *)my_malloc(sizeof(MODEL));
+
+    if (!use_gmumr) {
+        nol_ll(modelfile,&max_sv,&max_words,&ll); /* scan size of model file */
+        max_words+=2;
+        ll+=2;
+
+        words = (WORD *)my_malloc(sizeof(WORD)*(max_words+10));
+        line = (char *)my_malloc(sizeof(char)*ll);
+
+        if ((modelfl = fopen (modelfile, "r")) == NULL)
+        { perror (modelfile); EXIT (1); }
+
+        fscanf(modelfl,"SVM-light Version %s\n",version_buffer);
+        if(strcmp(version_buffer,VERSION)) {
+            perror ("Version of model-file does not match version of svm_classify!");
+            EXIT (1);
+        }
+        fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.kernel_type);
+        fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.poly_degree);
+        fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.rbf_gamma);
+        fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_lin);
+        fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_const);
+        fscanf(modelfl,"%[^#]%*[^\n]\n", model->kernel_parm.custom);
+
+        fscanf(modelfl,"%ld%*[^\n]\n", &model->totwords);
+        fscanf(modelfl,"%ld%*[^\n]\n", &model->totdoc);
+        fscanf(modelfl,"%ld%*[^\n]\n", &model->sv_num);
+        fscanf(modelfl,"%lf%*[^\n]\n", &model->b);
+    } else { // use_gmumr
+        max_words = config.getDataDim();
+        words = (WORD *)my_malloc(sizeof(WORD)*(max_words+10));
+
+        LOG(
+            config.log,
+            LogLevel::DEBUG_LEVEL,
+            __debug_prefix__ + ".libraryReadModel() Converting config to model..."
+        );
+
+        /* 0=linear, 1=poly, 2=rbf, 3=sigmoid, 4=custom -- same as GMUM.R! */
+        model->kernel_parm.kernel_type = static_cast<long>(config.kernel_type);
+        // -d int -> parameter d in polynomial kernel
+        model->kernel_parm.poly_degree = config.degree;
+        // -g float -> parameter gamma in rbf kernel
+        model->kernel_parm.rbf_gamma = config.gamma;
+        // -s float -> parameter s in sigmoid/poly kernel
+        model->kernel_parm.coef_lin = config.gamma;
+        // -r float -> parameter c in sigmoid/poly kernel
+        model->kernel_parm.coef_const = config.coef0;
+        // -u string -> parameter of user defined kernel
+        strcpy(model->kernel_parm.custom, "empty");
+        // highest feature index
+        model->totwords = config.getDataDim();
+        // number of training documents
+        model->totdoc = config.target.n_rows;
+        // number of support vectors plus 1 (!)
+        model->sv_num = config.l + 1;
+        /* Threshold b (has opposite sign than SVMClient::predict())
+         * In svm_common.c:57 in double classify_example_linear():
+         * return(sum-model->b);
+         */
+        model->b = - config.b;
+
+        LOG(
+            config.log,
+            LogLevel::DEBUG_LEVEL,
+            __debug_prefix__ + ".libraryReadModel() Converting config done."
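// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] Two
// conventions collide in the conversion above: SVMLight's classifier returns
// sum - model->b, while the gmum.r side stores b so the decision value is
// w'x + b, hence the sign flip; and SVMLight's SV arrays are 1-based, hence
// sv_num = (number of SVs) + 1. In short:
#if 0
double to_svmlight_b(double config_b)  { return -config_b; }  // config -> MODEL
double from_svmlight_b(double model_b) { return -model_b;  }  // MODEL -> config
long   to_svmlight_sv_num(long l)      { return l + 1;     }  // 1-based arrays
#endif
// --------------------------------------------------------------------------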
+ ); + } + // GMUM.R changes } + + model->supvec = (DOC **)my_malloc(sizeof(DOC *)*model->sv_num); + model->alpha = (double *)my_malloc(sizeof(double)*model->sv_num); + model->index=NULL; + model->lin_weights=NULL; + + // GMUM.R changes { + if (!use_gmumr) { + for(i=1;isv_num;i++) { + fgets(line,(int)ll,modelfl); + if(!parse_document(line,words,&(model->alpha[i]),&queryid,&slackid, + &costfactor,&wpos,max_words,&comment)) { + C_PRINTF("\nParsing error while reading model file in SV %ld!\n%s", + i,line); + EXIT(1); + } + model->supvec[i] = create_example(-1, + 0,0, + 0.0, + create_svector(words,comment,1.0)); + } + fclose(modelfl); + free(line); + } else { + for(i = 1; i < model->sv_num; ++i) { + line = SVMConfigurationToSVMLightModelSVLine(config, i-1); + if(!parse_document(line,words,&(model->alpha[i]),&queryid,&slackid, + &costfactor,&wpos,max_words,&comment)) { + C_PRINTF("\nParsing error while reading model file in SV %ld!\n%s", + i,line); + EXIT(1); + } + model->supvec[i] = create_example(-1, + 0,0, + 0.0, + create_svector(words,comment,1.0)); + free(line); + } + } + // GMUM.R changes } + free(words); + if(verbosity>=1) { + C_FPRINTF(stdout, "OK. (%d support vectors read)\n",(int)(model->sv_num-1)); + } + + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".libraryReadModel() Done." + ); + + return(model); +} + +void SVMLightRunner::libraryReadDocuments ( + char *docfile, DOC ***docs, double **label, long int *totwords, + long int *totdoc, bool use_gmumr, SVMConfiguration &config +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".libraryReadDocuments() Started." + ); + + char *line,*comment; + WORD *words; + long dnum=0,wpos,dpos=0,dneg=0,dunlab=0,queryid,slackid,max_docs; + long max_words_doc, ll; + double doc_label,costfactor; + FILE *docfl; + + if(verbosity>=1) { + C_PRINTF("Scanning examples..."); C_FFLUSH(stdout); + } + // GMUM.R changes { + if (!use_gmumr) { + nol_ll(docfile,&max_docs,&max_words_doc,&ll); /* scan size of input file */ + } else { + max_docs = config.target.n_rows; + max_words_doc = config.getDataDim(); + // ll used only for file reading + } + // GMUM.R changes } + max_words_doc+=2; + ll+=2; + max_docs+=2; + if(verbosity>=1) { + C_PRINTF("done\n"); C_FFLUSH(stdout); + } + + (*docs) = (DOC **)my_malloc(sizeof(DOC *)*max_docs); /* feature vectors */ + (*label) = (double *)my_malloc(sizeof(double)*max_docs); /* target values */ + // GMUM.R changes { + if (!use_gmumr) { + line = (char *)my_malloc(sizeof(char)*ll); + + if ((docfl = fopen (docfile, "r")) == NULL) + { perror (docfile); EXIT (1); } + } + // GMUM.R changes } + + words = (WORD *)my_malloc(sizeof(WORD)*(max_words_doc+10)); + if(verbosity>=1) { + C_PRINTF("Reading examples into memory..."); C_FFLUSH(stdout); + } + dnum=0; + (*totwords)=0; + // GMUM.R changes { + bool newline; + if (!use_gmumr) { + newline = (!feof(docfl)) && fgets(line,(int)ll,docfl); + } else { + newline = false; + if (dnum < config.target.n_rows) { + newline = true; + std::string str = SVMConfigurationToSVMLightLearnInputLine(config, dnum); + line = new char[str.size() + 1]; + std::copy(str.begin(), str.end(), line); + line[str.size()] = '\0'; + } + } + while(newline) { + if (use_gmumr) { + std::string stringline = ""; + } + // GMUM.R changes } + if(line[0] == '#') continue; /* line contains comments */ + if(!parse_document(line,words,&doc_label,&queryid,&slackid,&costfactor, + &wpos,max_words_doc,&comment)) { + C_PRINTF("\nParsing error in line %ld!\n%s",dnum,line); + EXIT(1); + } + 
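// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] Each line
// parsed here follows SVMLight's input format: a target label followed by
// 1-based "index:value" pairs, optionally with a "cost:w" pair, e.g.
// "-1 cost:2 1:0.5 3:1.25" (see SVMConfigurationToSVMLightLearnInputLine()
// below, which generates such lines from the in-memory config). A minimal
// generator for the dense case:
#if 0
#include <cstddef>
#include <iomanip>
#include <sstream>
#include <string>
#include <vector>

std::string to_svmlight_line(int label, const std::vector<double> &x) {
    std::ostringstream ss;
    ss << label;
    for (std::size_t i = 0; i < x.size(); ++i)      // feature indices start at 1
        ss << ' ' << (i + 1) << ':' << std::setprecision(8) << x[i];
    return ss.str();
}
#endif
// --------------------------------------------------------------------------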
(*label)[dnum]=doc_label; + /* C_PRINTF("docnum=%ld: Class=%f ",dnum,doc_label); */ + if(doc_label > 0) dpos++; + if (doc_label < 0) dneg++; + if (doc_label == 0) { + if(config.use_transductive_learning){ + dunlab++; + }else{ + C_PRINTF("Please for transductive learning pass use_transductive_learning\n"); + EXIT(1); + } + } + if((wpos>1) && ((words[wpos-2]).wnum>(*totwords))) + (*totwords)=(words[wpos-2]).wnum; + if((*totwords) > MAXFEATNUM) { + C_PRINTF("\nMaximum feature number exceeds limit defined in MAXFEATNUM!\n"); + EXIT(1); + } + (*docs)[dnum] = create_example(dnum,queryid,slackid,costfactor, + create_svector(words,comment,1.0)); + /* C_PRINTF("\nNorm=%f\n",((*docs)[dnum]->fvec)->twonorm_sq); */ + dnum++; + if(verbosity>=1) { + if((dnum % 100) == 0) { + C_PRINTF("%ld..",dnum); C_FFLUSH(stdout); + } + } + // GMUM.R changes { + if (!use_gmumr) { + newline = (!feof(docfl)) && fgets(line,(int)ll,docfl); + } else { + newline = false; + if (dnum < config.target.n_rows) { + newline = true; + std::string str = SVMConfigurationToSVMLightLearnInputLine(config, dnum); + line = new char[str.size() + 1]; + std::copy(str.begin(), str.end(), line); + line[str.size()] = '\0'; + } + } + // GMUM.R changes } + } + + if (!use_gmumr) { + fclose(docfl); + free(line); + }; + free(words); + if(verbosity>=1) { + C_FPRINTF(stdout, "OK. (%ld examples read)\n", dnum); + } + (*totdoc)=dnum; +} + + +std::string SVMLightRunner::SVMConfigurationToSVMLightLearnInputLine( + SVMConfiguration &config, long int line_num +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".SVMConfigurationToSVMLightLearnInputLine() Started." + ); + + std::string line_string = ""; + + std::ostringstream ss; + double target_value = config.target[line_num]; + + + + // Handle user-defined labels + if (target_value == config.neg_target) { + ss << -1; + } else if (target_value == config.pos_target) { + ss << 1; + } else if (!target_value) { + ss << 0; + }else{ + C_PRINTF("Unrecognized class label %f\n", target_value); + EXIT(1); + } + + // Optional feature: cost :) + + if (config.use_example_weights || (config.use_class_weights && target_value)) { + double weight = 1.0; + if(config.use_example_weights){ + weight *= config.example_weights[line_num]; + } + if(config.use_class_weights){ + weight *= (target_value == config.pos_target? 
config.class_weights[1] : config.class_weights[0]); + } + + ss << " cost:" << std::setprecision(8) << weight; + + } + + // Matrix type handling + if (config.isSparse()) { + int current_row = 0; + arma::sp_mat::iterator begin = config.getSparseData().begin_col(line_num); + arma::sp_mat::iterator end = config.getSparseData().end_col(line_num); + for (arma::sp_mat::iterator it = begin; it != end; ++it) { + ss << ' ' << it.row() + 1 << ':' << std::setprecision(8); + current_row = it.row(); + ss << *it; + } + //Always output last row + if(current_row != config.getSparseData().n_rows-1){ + ss << ' ' << config.getSparseData().n_rows << ':' << std::setprecision(8); + ss << config.getSparseData()(config.getSparseData().n_rows-1, line_num); + } + + } else { + for (long int i = 1; i <= config.data.n_cols; ++i) { + ss << ' ' << i << ':' << std::setprecision(8); + ss << config.data(line_num, i-1); + } + } + + ss << std::endl; + line_string = ss.str(); + + return line_string; +} + + +char * SVMLightRunner::SVMConfigurationToSVMLightModelSVLine( + SVMConfiguration &config, long int line_num +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".SVMConfigurationToSVMLightModelSVLine() Started." + ); + + + std::ostringstream ss; + ss << std::setprecision(32) << config.alpha_y[line_num]; + for (long int i = 1; i <= config.support_vectors.n_rows; ++i) { + ss << ' ' << i << ':' << std::setprecision(8) << config.support_vectors(i-1, line_num); + } + ss << " #" << std::endl; + + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".SVMConfigurationToSVMLightModelSVLine() Done." + ); + + std::string line = ss.str(); + char * c_line = new char[line.length() + 1]; + strcpy(c_line, line.c_str()); + + //FIXME: well.. this hopefully will be fixed once we change way we interact with SVMLight + return(c_line); +} + +void SVMLightRunner::SVMLightModelToSVMConfiguration( + MODEL *model, SVMConfiguration &config +) { + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".SVMLightModelToSVMConfiguration() Started." + ); + + long i, j, k; + SVECTOR *v; + + /* 0=linear, 1=poly, 2=rbf, 3=sigmoid, 4=custom -- same as GMUM.R! */ + config.kernel_type = static_cast(model->kernel_parm.kernel_type); + // -d int -> parameter d in polynomial kernel + config.degree = model->kernel_parm.poly_degree; + // -g float -> parameter gamma in rbf kernel + config.gamma = model->kernel_parm.rbf_gamma; + // -s float -> parameter s in sigmoid/poly kernel + config.gamma = model->kernel_parm.coef_lin; + // -r float -> parameter c in sigmoid/poly kernel + config.coef0 = model->kernel_parm.coef_const; + // -u string -> parameter of user defined kernel + config.kernel_parm_custom = model->kernel_parm.custom; + // highest feature index - no assignment to read-only data + //config->data.n_cols = model->totwords; + // number of training documents - no assignment to read-only data + //config->target.n_rows = model->totdoc; + // number of support vectors plus 1 (!) 
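// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] The
// support vectors below are assembled in compressed-sparse-column form:
// colptr has n_cols+1 entries and column j owns the nonzeros in
// [colptr[j], colptr[j+1]), with their row indices in rowind and entries in
// values. Armadillo's batch constructor takes exactly these arrays:
#if 0
#include <armadillo>

arma::sp_mat csc_example() {
    arma::uvec rowind = {0, 2, 1};    // row index of each nonzero
    arma::uvec colptr = {0, 2, 3};    // col 0 owns entries 0..1, col 1 owns 2
    arma::vec  values = {1.0, 2.0, 3.0};
    return arma::sp_mat(rowind, colptr, values, 3, 2);   // 3x2 sparse matrix
}
#endif
// --------------------------------------------------------------------------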
+ config.l = model->sv_num - 1; + // Threshold b (has opposite sign than SVMClient::predict() + // NOTE: see libraryReadModel() + config.b = - model->b; + config.nr_class = 2; // svmlight works only with 2 classes + + config.alpha_y = arma::zeros(config.l); + int dim = model->totwords; + + // Constructing alphas and SV: + arma::uvec colptr(config.l+1); //this always has this dim + colptr(0) = 0; //always + + int non_zero = 0; + //get necessary statistics + for (i = 1; i < model->sv_num; i++) { + for (v = model->supvec[i]->fvec; v; v=v->next) { + for (j = 0; (v->words[j]).wnum; j++) { + non_zero += 1; + } + } + } + arma::uvec rowind(non_zero); + arma::vec values(non_zero); + + int current = 0; + for (i = 1; i < model->sv_num; i++) { + for (v = model->supvec[i]->fvec; v; v=v->next) { + config.alpha_y(i - 1) = model->alpha[i]*v->factor; + for (j = 0; (v->words[j]).wnum; j++) { + k = (v->words[j]).wnum - 1; + rowind[current] = k; + values[current] = v->words[j].weight; + current++; + } + colptr(i) = current; + } + } + + config.support_vectors = arma::sp_mat(rowind, colptr, values, dim, config.l); + config.w = (config.support_vectors * config.alpha_y); + + LOG( + config.log, + LogLevel::DEBUG_LEVEL, + __debug_prefix__ + ".SVMLightModelToSVMConfiguration() Done." + ); + +} + +void SVMLightRunner::libraryWaitAnyKey() +{ + C_PRINTF("\n(more)\n"); + (void)getc(stdin); +} + + +void SVMLightRunner::libraryPrintHelp() +{ + C_PRINTF("\nSVM-light %s: Support Vector Machine, learning module %s\n",VERSION,VERSION_DATE); + copyright_notice(); +} + diff --git a/src/svm/two_e_svm_post.cpp b/src/svm/two_e_svm_post.cpp new file mode 100644 index 00000000..5cdb490e --- /dev/null +++ b/src/svm/two_e_svm_post.cpp @@ -0,0 +1,38 @@ +#include + +#include "two_e_svm_post.h" +#include "svm_handler.h" +#include "svm_basic.h" +#include "svm_utils.h" + +using namespace arma; +using namespace std; + +void TwoeSVMPostprocessor::processRequest(SVMConfiguration &data) { + if (!data.prediction) { + + //TODO: Use copying memory for better memory optimalization + //DIM PROJECTION: examps x dim x dim x 1 = exams x 1 + arma::vec w = data.inv_of_sqrt_of_cov.t() * arma::vec(data.w); + + double p_plus = stddev(SvmUtils::matrixByValue(data.data, data.target, data.pos_target) * w); + double p_minus = stddev(SvmUtils::matrixByValue(data.data, data.target, data.neg_target) * w); + + //-rho = b + //TODO: consider multiclass examples + double b_dash = data.getB() + (p_plus - p_minus) / (p_plus + p_minus); + data.setB(b_dash); + + // keep w as sparse + data.w = arma::sp_mat(w.n_elem, 1); + for (int i = 0; i != w.n_elem; ++i) { + if (w(i) != 0) data.w(i,0) = w(i); + } + } + data.data = data.tmp_data; + data.target = data.tmp_target; +} + +bool TwoeSVMPostprocessor::canHandle(SVMConfiguration& data) { + return true; +} diff --git a/src/svm/two_e_svm_pre.cpp b/src/svm/two_e_svm_pre.cpp new file mode 100644 index 00000000..bdfa15cd --- /dev/null +++ b/src/svm/two_e_svm_pre.cpp @@ -0,0 +1,77 @@ +#include +#include "two_e_svm_pre.h" + +#include "svm_handler.h" +#include "svm_basic.h" +#include "svm_utils.h" +#include +#include + +using namespace arma; +using namespace std; + +void TwoeSVMPreprocessor::processRequest(SVMConfiguration& data) { + data.tmp_data = data.data; + data.tmp_target = data.target; + + if (!data.prediction) { + double targets[2] = {std::numeric_limits::infinity(), std::numeric_limits::infinity()}; + for(unsigned i = 0; i < data.target.n_rows; i++) { + double elem = data.target(i, 0); + if(targets[0] == 
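// --------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of this changeset.] The 2eSVM
// preprocessor that follows whitens the data with the inverse square root of
// the summed per-class covariances, cov0 = cov(X+) + cov(X-), so that both
// classes end up with comparable spread before the SVM is trained. A compact
// version, assuming cov0 is symmetric positive definite:
#if 0
#include <armadillo>

arma::mat whiten(const arma::mat &X_pos, const arma::mat &X_neg,
                 const arma::mat &X) {        // rows = examples
    arma::mat cov0 = arma::cov(X_pos) + arma::cov(X_neg);
    arma::mat inv_sqrt = arma::inv_sympd(arma::sqrtmat_sympd(cov0));
    return X * inv_sqrt.t();                  // X' = X * (cov0^(-1/2))'
}
#endif
// --------------------------------------------------------------------------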
std::numeric_limits::infinity()) + targets[0] = elem; + else if(targets[0] != elem) { + if(targets[1] == std::numeric_limits::infinity()) { + targets[1] = elem; + } else { + if(targets[1] != elem) { + data.error_msg = "Class should have only 2 elements"; + ASSERT(targets[1] != elem); + return; + } + } + } + } + + data.pos_target = targets[0] > targets[1] ? targets[0] : targets[1]; + data.neg_target = targets[0] > targets[1] ? targets[1] : targets[0]; + + mat pos = SvmUtils::matrixByValue(data.data, data.target, data.pos_target); + mat neg = SvmUtils::matrixByValue(data.data, data.target, data.neg_target); + + //norm type 1, sampling from population, cause cec is using it :-( + mat cov_pos, cov_neg; + int max_rows = pos.n_cols; + if(pos.n_rows == 1) { + cov_pos = zeros(max_rows, max_rows); + } else { + cov_pos = cov(pos); + } + if(neg.n_rows == 1) { + cov_neg = zeros(max_rows, max_rows); + } else { + cov_neg = cov(neg); + } + //norm type 1, sampling from population, cause cec is using it :-( + + mat cov0 = cov_pos + cov_neg; + //cov0 += data.cov_eps_smoothing_start * arma::eye(data.data.n_cols, data.data.n_cols); + SvmUtils::sqrtInvMat(cov0, cov0InvSqrt, data.cov_eps_smoothing_start); + data.inv_of_sqrt_of_cov = cov0InvSqrt; + mat X_dash_plus = cov0InvSqrt * pos.t(); + mat X_dash_minus = cov0InvSqrt * neg.t();; + vec pos_target_vec = vec(pos.n_rows); + vec neg_target_vec = vec(neg.n_rows); + pos_target_vec.fill(data.pos_target); + neg_target_vec.fill(data.neg_target); + data.target = join_vert(pos_target_vec, neg_target_vec); + data.data = join_vert(X_dash_plus.t(), X_dash_minus.t()); + } else { + data.data = data.data * cov0InvSqrt.t() ; + } +} + +bool TwoeSVMPreprocessor::canHandle(SVMConfiguration& data) { + // TODO + return true; +} diff --git a/src/svm/validator_runner.cpp b/src/svm/validator_runner.cpp new file mode 100644 index 00000000..e69de29b diff --git a/src/svmlight/svm_common.c b/src/svmlight/svm_common.c new file mode 100644 index 00000000..618c08b8 --- /dev/null +++ b/src/svmlight/svm_common.c @@ -0,0 +1,987 @@ +/************************************************************************/ +/* */ +/* svm_common.c */ +/* */ +/* Definitions and functions used in both svm_learn and svm_classify. */ +/* */ +/* Author: Thorsten Joachims */ +/* Date: 02.07.04 */ +/* */ +/* Copyright (c) 2004 Thorsten Joachims - All rights reserved */ +/* */ +/* This software is available for non-commercial use only. It must */ +/* not be modified and distributed without prior permission of the */ +/* author. The author is not responsible for implications from the */ +/* use of this software. 
*/ +/* */ +/************************************************************************/ + +# include "ctype.h" +# include "svm_common.h" +# include "kernel.h" /* this contains a user supplied kernel */ + +#include "utils/cutils.h" + +long verbosity; /* verbosity level (0-4) */ +long kernel_cache_statistic; + +double classify_example(MODEL *model, DOC *ex) + /* classifies one example */ +{ + register long i; + register double dist; + + if((model->kernel_parm.kernel_type == LINEAR) && (model->lin_weights)) + return(classify_example_linear(model,ex)); + + dist=0; + for(i=1;isv_num;i++) { + dist+=kernel(&model->kernel_parm,model->supvec[i],ex)*model->alpha[i]; + } + return(dist-model->b); +} + +double classify_example_linear(MODEL *model, DOC *ex) + /* classifies example for linear kernel */ + + /* important: the model must have the linear weight vector computed */ + /* use: add_weight_vector_to_linear_model(&model); */ + + + /* important: the feature numbers in the example to classify must */ + /* not be larger than the weight vector! */ +{ + double sum=0; + SVECTOR *f; + + for(f=ex->fvec;f;f=f->next) + sum+=f->factor*sprod_ns(model->lin_weights,f); + return(sum-model->b); +} + + +double kernel(KERNEL_PARM *kernel_parm, DOC *a, DOC *b) + /* calculate the kernel function */ +{ + double sum=0; + SVECTOR *fa,*fb; + + /* in case the constraints are sums of feature vector as represented + as a list of SVECTOR's with their coefficient factor in the sum, + take the kernel between all pairs */ + for(fa=a->fvec;fa;fa=fa->next) { + for(fb=b->fvec;fb;fb=fb->next) { + if(fa->kernel_id == fb->kernel_id) + sum+=fa->factor*fb->factor*single_kernel(kernel_parm,fa,fb); + } + } + return(sum); +} + +double single_kernel(KERNEL_PARM *kernel_parm, SVECTOR *a, SVECTOR *b) + /* calculate the kernel function between two vectors */ +{ + kernel_cache_statistic++; + switch(kernel_parm->kernel_type) { + case 0: /* linear */ + return(sprod_ss(a,b)); + case 1: /* polynomial */ + return(pow(kernel_parm->coef_lin*sprod_ss(a,b)+kernel_parm->coef_const,(double)kernel_parm->poly_degree)); + case 2: /* radial basis function */ + return(exp(-kernel_parm->rbf_gamma*(a->twonorm_sq-2*sprod_ss(a,b)+b->twonorm_sq))); + case 3: /* sigmoid neural net */ + return(tanh(kernel_parm->coef_lin*sprod_ss(a,b)+kernel_parm->coef_const)); + case 4: /* custom-kernel supplied in file kernel.h*/ + return(custom_kernel(kernel_parm,a,b)); + default: C_PRINTF("Error: Unknown kernel function\n"); C_EXIT(1); return -1.0; + } +} + + +SVECTOR *create_svector(WORD *words,char *userdefined,double factor) +{ + SVECTOR *vec; + long fnum,i; + + fnum=0; + while(words[fnum].wnum) { + fnum++; + } + fnum++; + vec = (SVECTOR *)my_malloc(sizeof(SVECTOR)); + vec->words = (WORD *)my_malloc(sizeof(WORD)*(fnum)); + for(i=0;iwords[i]=words[i]; + } + vec->twonorm_sq=sprod_ss(vec,vec); + + fnum=0; + while(userdefined[fnum]) { + fnum++; + } + fnum++; + vec->userdefined = (char *)my_malloc(sizeof(char)*(fnum)); + for(i=0;iuserdefined[i]=userdefined[i]; + } + vec->kernel_id=0; + vec->next=NULL; + vec->factor=factor; + return(vec); +} + +SVECTOR *copy_svector(SVECTOR *vec) +{ + SVECTOR *newvec=NULL; + if(vec) { + newvec=create_svector(vec->words,vec->userdefined,vec->factor); + newvec->next=copy_svector(vec->next); + } + return(newvec); +} + +void free_svector(SVECTOR *vec) +{ + if(vec) { + free(vec->words); + if(vec->userdefined) + free(vec->userdefined); + free_svector(vec->next); + free(vec); + } +} + +double sprod_ss(SVECTOR *a, SVECTOR *b) + /* compute the inner product of 
two sparse vectors */ +{ + register double sum=0; + register WORD *ai,*bj; + ai=a->words; + bj=b->words; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + bj++; + } + else if (ai->wnum < bj->wnum) { + ai++; + } + else { + sum+=(ai->weight) * (bj->weight); + ai++; + bj++; + } + } + return((double)sum); +} + +SVECTOR* sub_ss(SVECTOR *a, SVECTOR *b) + /* compute the difference a-b of two sparse vectors */ + /* Note: SVECTOR lists are not followed, but only the first + SVECTOR is used */ +{ + SVECTOR *vec; + register WORD *sum,*sumi; + register WORD *ai,*bj; + long veclength; + + ai=a->words; + bj=b->words; + veclength=0; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + veclength++; + bj++; + } + else if (ai->wnum < bj->wnum) { + veclength++; + ai++; + } + else { + veclength++; + ai++; + bj++; + } + } + while (bj->wnum) { + veclength++; + bj++; + } + while (ai->wnum) { + veclength++; + ai++; + } + veclength++; + + sum=(WORD *)my_malloc(sizeof(WORD)*veclength); + sumi=sum; + ai=a->words; + bj=b->words; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + (*sumi)=(*bj); + sumi->weight*=(-1); + sumi++; + bj++; + } + else if (ai->wnum < bj->wnum) { + (*sumi)=(*ai); + sumi++; + ai++; + } + else { + (*sumi)=(*ai); + sumi->weight-=bj->weight; + if(sumi->weight != 0) + sumi++; + ai++; + bj++; + } + } + while (bj->wnum) { + (*sumi)=(*bj); + sumi->weight*=(-1); + sumi++; + bj++; + } + while (ai->wnum) { + (*sumi)=(*ai); + sumi++; + ai++; + } + sumi->wnum=0; + + vec=create_svector(sum,"",1.0); + free(sum); + + return(vec); +} + +SVECTOR* add_ss(SVECTOR *a, SVECTOR *b) + /* compute the sum a+b of two sparse vectors */ + /* Note: SVECTOR lists are not followed, but only the first + SVECTOR is used */ +{ + SVECTOR *vec; + register WORD *sum,*sumi; + register WORD *ai,*bj; + long veclength; + + ai=a->words; + bj=b->words; + veclength=0; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + veclength++; + bj++; + } + else if (ai->wnum < bj->wnum) { + veclength++; + ai++; + } + else { + veclength++; + ai++; + bj++; + } + } + while (bj->wnum) { + veclength++; + bj++; + } + while (ai->wnum) { + veclength++; + ai++; + } + veclength++; + + /*** is veclength=lengSequence(a)+lengthSequence(b)? ***/ + + sum=(WORD *)my_malloc(sizeof(WORD)*veclength); + sumi=sum; + ai=a->words; + bj=b->words; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + (*sumi)=(*bj); + sumi++; + bj++; + } + else if (ai->wnum < bj->wnum) { + (*sumi)=(*ai); + sumi++; + ai++; + } + else { + (*sumi)=(*ai); + sumi->weight+=bj->weight; + if(sumi->weight != 0) + sumi++; + ai++; + bj++; + } + } + while (bj->wnum) { + (*sumi)=(*bj); + sumi++; + bj++; + } + while (ai->wnum) { + (*sumi)=(*ai); + sumi++; + ai++; + } + sumi->wnum=0; + + vec=create_svector(sum,"",1.0); + free(sum); + + return(vec); +} + +SVECTOR* add_list_ss(SVECTOR *a) + /* computes the linear combination of the SVECTOR list weighted + by the factor of each SVECTOR */ +{ + SVECTOR *scaled,*oldsum,*sum,*f; + WORD empty[2]; + + if(a){ + sum=smult_s(a,a->factor); + for(f=a->next;f;f=f->next) { + scaled=smult_s(f,f->factor); + oldsum=sum; + sum=add_ss(sum,scaled); + free_svector(oldsum); + free_svector(scaled); + } + sum->factor=1.0; + } + else { + empty[0].wnum=0; + sum=create_svector(empty,"",1.0); + } + return(sum); +} + +void append_svector_list(SVECTOR *a, SVECTOR *b) + /* appends SVECTOR b to the end of SVECTOR a. 
*/ +{ + SVECTOR *f; + + for(f=a;f->next;f=f->next); /* find end of first vector list */ + f->next=b; /* append the two vector lists */ +} + +SVECTOR* smult_s(SVECTOR *a, double factor) + /* scale sparse vector a by factor */ +{ + SVECTOR *vec; + register WORD *sum,*sumi; + register WORD *ai; + long veclength; + + ai=a->words; + veclength=0; + while (ai->wnum) { + veclength++; + ai++; + } + veclength++; + + sum=(WORD *)my_malloc(sizeof(WORD)*veclength); + sumi=sum; + ai=a->words; + while (ai->wnum) { + (*sumi)=(*ai); + sumi->weight*=factor; + if(sumi->weight != 0) + sumi++; + ai++; + } + sumi->wnum=0; + + vec=create_svector(sum,a->userdefined,a->factor); + free(sum); + + return(vec); +} + +int featvec_eq(SVECTOR *a, SVECTOR *b) + /* tests two sparse vectors for equality */ +{ + register WORD *ai,*bj; + ai=a->words; + bj=b->words; + while (ai->wnum && bj->wnum) { + if(ai->wnum > bj->wnum) { + if((bj->weight) != 0) + return(0); + bj++; + } + else if (ai->wnum < bj->wnum) { + if((ai->weight) != 0) + return(0); + ai++; + } + else { + if((ai->weight) != (bj->weight)) + return(0); + ai++; + bj++; + } + } + return(1); +} + +double model_length_s(MODEL *model, KERNEL_PARM *kernel_parm) + /* compute length of weight vector */ +{ + register long i,j; + register double sum=0,alphai; + register DOC *supveci; + + for(i=1;isv_num;i++) { + alphai=model->alpha[i]; + supveci=model->supvec[i]; + for(j=1;jsv_num;j++) { + sum+=alphai*model->alpha[j] + *kernel(kernel_parm,supveci,model->supvec[j]); + } + } + return(sqrt(sum)); +} + +void clear_vector_n(double *vec, long int n) +{ + register long i; + for(i=0;i<=n;i++) vec[i]=0; +} + +void add_vector_ns(double *vec_n, SVECTOR *vec_s, double faktor) +{ + register WORD *ai; + ai=vec_s->words; + while (ai->wnum) { + vec_n[ai->wnum]+=(faktor*ai->weight); + ai++; + } +} + +double sprod_ns(double *vec_n, SVECTOR *vec_s) +{ + register double sum=0; + register WORD *ai; + ai=vec_s->words; + while (ai->wnum) { + sum+=(vec_n[ai->wnum]*ai->weight); + ai++; + } + return(sum); +} + +void add_weight_vector_to_linear_model(MODEL *model) + /* compute weight vector in linear case and add to model */ +{ + long i; + SVECTOR *f; + + model->lin_weights=(double *)my_malloc(sizeof(double)*(model->totwords+1)); + clear_vector_n(model->lin_weights,model->totwords); + for(i=1;isv_num;i++) { + for(f=(model->supvec[i])->fvec;f;f=f->next) + add_vector_ns(model->lin_weights,f,f->factor*model->alpha[i]); + } +} + + +DOC *create_example(long docnum, long queryid, long slackid, + double costfactor, SVECTOR *fvec) +{ + DOC *example; + example = (DOC *)my_malloc(sizeof(DOC)); + example->docnum=docnum; + example->queryid=queryid; + example->slackid=slackid; + example->costfactor=costfactor; + example->fvec=fvec; + return(example); +} + +void free_example(DOC *example, long deep) +{ + if(example) { + if(deep) { + if(example->fvec) + free_svector(example->fvec); + } + free(example); + } +} + +void write_model(char *modelfile, MODEL *model) +{ + FILE *modelfl; + long j,i,sv_num; + SVECTOR *v; + + if(verbosity>=1) { + C_PRINTF("Writing model file..."); C_FFLUSH(stdout); + } + if ((modelfl = fopen (modelfile, "w")) == NULL) + { perror (modelfile); C_EXIT (1); } + C_FPRINTF(modelfl,"SVM-light Version %s\n",VERSION); + C_FPRINTF(modelfl,"%ld # kernel type\n", + model->kernel_parm.kernel_type); + C_FPRINTF(modelfl,"%ld # kernel parameter -d \n", + model->kernel_parm.poly_degree); + C_FPRINTF(modelfl,"%.8g # kernel parameter -g \n", + model->kernel_parm.rbf_gamma); + C_FPRINTF(modelfl,"%.8g # kernel 
parameter -s \n", + model->kernel_parm.coef_lin); + C_FPRINTF(modelfl,"%.8g # kernel parameter -r \n", + model->kernel_parm.coef_const); + C_FPRINTF(modelfl,"%s# kernel parameter -u \n",model->kernel_parm.custom); + C_FPRINTF(modelfl,"%ld # highest feature index \n",model->totwords); + C_FPRINTF(modelfl,"%ld # number of training documents \n",model->totdoc); + + sv_num=1; + for(i=1;isv_num;i++) { + for(v=model->supvec[i]->fvec;v;v=v->next) + sv_num++; + } + C_FPRINTF(modelfl,"%ld # number of support vectors plus 1 \n",sv_num); + C_FPRINTF(modelfl,"%.8g # threshold b, each following line is a SV (starting with alpha*y)\n",model->b); + + for(i=1;isv_num;i++) { + for(v=model->supvec[i]->fvec;v;v=v->next) { + C_FPRINTF(modelfl,"%.32g ",model->alpha[i]*v->factor); + for (j=0; (v->words[j]).wnum; j++) { + C_FPRINTF(modelfl,"%ld:%.8g ", + (long)(v->words[j]).wnum, + (double)(v->words[j]).weight); + } + C_FPRINTF(modelfl,"#%s\n",v->userdefined); + /* NOTE: this could be made more efficient by summing the + alpha's of identical vectors before writing them to the + file. */ + } + } + fclose(modelfl); + if(verbosity>=1) { + C_PRINTF("done\n"); + } +} + + +MODEL *read_model(char *modelfile) +{ + FILE *modelfl; + long i,queryid,slackid; + double costfactor; + long max_sv,max_words,ll,wpos; + char *line,*comment; + WORD *words; + char version_buffer[100]; + MODEL *model; + + if(verbosity>=1) { + C_PRINTF("Reading model..."); C_FFLUSH(stdout); + } + + nol_ll(modelfile,&max_sv,&max_words,&ll); /* scan size of model file */ + max_words+=2; + ll+=2; + + words = (WORD *)my_malloc(sizeof(WORD)*(max_words+10)); + line = (char *)my_malloc(sizeof(char)*ll); + model = (MODEL *)my_malloc(sizeof(MODEL)); + + if ((modelfl = fopen (modelfile, "r")) == NULL) + { perror (modelfile); C_EXIT (1); } + + fscanf(modelfl,"SVM-light Version %s\n",version_buffer); + if(strcmp(version_buffer,VERSION)) { + perror ("Version of model-file does not match version of svm_classify!"); + C_EXIT (1); + } + fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.kernel_type); + fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.poly_degree); + fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.rbf_gamma); + fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_lin); + fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_const); + fscanf(modelfl,"%[^#]%*[^\n]\n", model->kernel_parm.custom); + + fscanf(modelfl,"%ld%*[^\n]\n", &model->totwords); + fscanf(modelfl,"%ld%*[^\n]\n", &model->totdoc); + fscanf(modelfl,"%ld%*[^\n]\n", &model->sv_num); + fscanf(modelfl,"%lf%*[^\n]\n", &model->b); + + model->supvec = (DOC **)my_malloc(sizeof(DOC *)*model->sv_num); + model->alpha = (double *)my_malloc(sizeof(double)*model->sv_num); + model->index=NULL; + model->lin_weights=NULL; + + for(i=1;isv_num;i++) { + fgets(line,(int)ll,modelfl); + if(!parse_document(line,words,&(model->alpha[i]),&queryid,&slackid, + &costfactor,&wpos,max_words,&comment)) { + C_PRINTF("\nParsing error while reading model file in SV %ld!\n%s", + i,line); + C_EXIT(1); + } + model->supvec[i] = create_example(-1, + 0,0, + 0.0, + create_svector(words,comment,1.0)); + } + fclose(modelfl); + free(line); + free(words); + if(verbosity>=1) { + C_FPRINTF(stdout, "OK. 
(%d support vectors read)\n",(int)(model->sv_num-1)); + } + return(model); +} + +MODEL *copy_model(MODEL *model) +{ + MODEL *newmodel; + long i; + + newmodel=(MODEL *)my_malloc(sizeof(MODEL)); + (*newmodel)=(*model); + newmodel->supvec = (DOC **)my_malloc(sizeof(DOC *)*model->sv_num); + newmodel->alpha = (double *)my_malloc(sizeof(double)*model->sv_num); + newmodel->index = NULL; /* index is not copied */ + newmodel->supvec[0] = NULL; + newmodel->alpha[0] = 0; + for(i=1;isv_num;i++) { + newmodel->alpha[i]=model->alpha[i]; + newmodel->supvec[i]=create_example(model->supvec[i]->docnum, + model->supvec[i]->queryid,0, + model->supvec[i]->costfactor, + copy_svector(model->supvec[i]->fvec)); + } + if(model->lin_weights) { + newmodel->lin_weights = (double *)my_malloc(sizeof(double)*(model->totwords+1)); + for(i=0;itotwords+1;i++) + newmodel->lin_weights[i]=model->lin_weights[i]; + } + return(newmodel); +} + +void free_model(MODEL *model, int deep) +{ + long i; + + if(model->supvec) { + if(deep) { + for(i=1;isv_num;i++) { + free_example(model->supvec[i],1); + } + } + free(model->supvec); + } + if(model->alpha) free(model->alpha); + if(model->index) free(model->index); + if(model->lin_weights) free(model->lin_weights); + free(model); +} + + +void read_documents(char *docfile, DOC ***docs, double **label, + long int *totwords, long int *totdoc) +{ + char *line,*comment; + WORD *words; + long dnum=0,wpos,dpos=0,dneg=0,dunlab=0,queryid,slackid,max_docs; + long max_words_doc, ll; + double doc_label,costfactor; + FILE *docfl; + + if(verbosity>=1) { + C_PRINTF("Scanning examples..."); C_FFLUSH(stdout); + } + nol_ll(docfile,&max_docs,&max_words_doc,&ll); /* scan size of input file */ + max_words_doc+=2; + ll+=2; + max_docs+=2; + if(verbosity>=1) { + C_PRINTF("done\n"); C_FFLUSH(stdout); + } + + (*docs) = (DOC **)my_malloc(sizeof(DOC *)*max_docs); /* feature vectors */ + (*label) = (double *)my_malloc(sizeof(double)*max_docs); /* target values */ + line = (char *)my_malloc(sizeof(char)*ll); + + if ((docfl = fopen (docfile, "r")) == NULL) + { perror (docfile); C_EXIT (1); } + + words = (WORD *)my_malloc(sizeof(WORD)*(max_words_doc+10)); + if(verbosity>=1) { + C_PRINTF("Reading examples into memory..."); C_FFLUSH(stdout); + } + dnum=0; + (*totwords)=0; + while((!feof(docfl)) && fgets(line,(int)ll,docfl)) { + if(line[0] == '#') continue; /* line contains comments */ + if(!parse_document(line,words,&doc_label,&queryid,&slackid,&costfactor, + &wpos,max_words_doc,&comment)) { + C_PRINTF("\nParsing error in line %ld!\n%s",dnum,line); + C_EXIT(1); + } + (*label)[dnum]=doc_label; + /* C_PRINTF("docnum=%ld: Class=%f ",dnum,doc_label); */ + if(doc_label > 0) dpos++; + if (doc_label < 0) dneg++; + if (doc_label == 0) dunlab++; + if((wpos>1) && ((words[wpos-2]).wnum>(*totwords))) + (*totwords)=(words[wpos-2]).wnum; + if((*totwords) > MAXFEATNUM) { + C_PRINTF("\nMaximum feature number exceeds limit defined in MAXFEATNUM!\n"); + C_PRINTF("LINE: %s\n",line); + C_EXIT(1); + } + (*docs)[dnum] = create_example(dnum,queryid,slackid,costfactor, + create_svector(words,comment,1.0)); + /* C_PRINTF("\nNorm=%f\n",((*docs)[dnum]->fvec)->twonorm_sq); */ + dnum++; + if(verbosity>=1) { + if((dnum % 100) == 0) { + C_PRINTF("%ld..",dnum); C_FFLUSH(stdout); + } + } + } + + fclose(docfl); + free(line); + free(words); + if(verbosity>=1) { + C_FPRINTF(stdout, "OK. 
(%ld examples read)\n", dnum); + } + (*totdoc)=dnum; +} + +int parse_document(char *line, WORD *words, double *label, + long *queryid, long *slackid, double *costfactor, + long int *numwords, long int max_words_doc, + char **comment) +{ + register long wpos,pos; + long wnum; + double weight; + int numread; + char featurepair[1000],junk[1000]; + + (*queryid)=0; + (*slackid)=0; + (*costfactor)=1; + + pos=0; + (*comment)=NULL; + while(line[pos] ) { /* cut off comments */ + if((line[pos] == '#') && (!(*comment))) { + line[pos]=0; + (*comment)=&(line[pos+1]); + } + if(line[pos] == '\n') { /* strip the CR */ + line[pos]=0; + } + pos++; + } + if(!(*comment)) (*comment)=&(line[pos]); + /* C_PRINTF("Comment: '%s'\n",(*comment)); */ + + wpos=0; + /* check, that line starts with target value or zero, but not with + feature pair */ + if(sscanf(line,"%s",featurepair) == EOF) return(0); + pos=0; + while((featurepair[pos] != ':') && featurepair[pos]) pos++; + if(featurepair[pos] == ':') { + perror ("Line must start with label or 0!!!\n"); + C_PRINTF("LINE: %s\n",line); + C_EXIT (1); + } + /* read the target value */ + if(sscanf(line,"%lf",label) == EOF) return(0); + pos=0; + while(space_or_null((int)line[pos])) pos++; + while((!space_or_null((int)line[pos])) && line[pos]) pos++; + while(((numread=sscanf(line+pos,"%s",featurepair)) != EOF) && + (numread > 0) && + (wpos 0) + (*slackid)=(long)wnum; + else { + perror ("Slack-id must be greater or equal to 1!!!\n"); + C_PRINTF("LINE: %s\n",line); + C_EXIT (1); + } + } + else if(sscanf(featurepair,"cost:%lf%s",&weight,junk)==1) { + /* it is the example-dependent cost factor */ + (*costfactor)=(double)weight; + } + else if(sscanf(featurepair,"%ld:%lf%s",&wnum,&weight,junk)==2) { + /* it is a regular feature */ + if(wnum<=0) { + perror ("Feature numbers must be larger or equal to 1!!!\n"); + C_PRINTF("LINE: %s\n",line); + C_EXIT (1); + } + if((wpos>0) && ((words[wpos-1]).wnum >= wnum)) { + perror ("Features must be in increasing order!!!\n"); + C_PRINTF("LINE: %s\n",line); + C_EXIT (1); + } + (words[wpos]).wnum=wnum; + (words[wpos]).weight=(FVAL)weight; + wpos++; + } + else { + perror ("Cannot parse feature/value pair!!!\n"); + C_PRINTF("'%s' in LINE: %s\n",featurepair,line); + C_EXIT (1); + } + } + (words[wpos]).wnum=0; + (*numwords)=wpos+1; + return(1); +} + +double *read_alphas(char *alphafile,long totdoc) + /* reads the alpha vector from a file as written by the + write_alphas function */ +{ + FILE *fl; + double *alpha; + long dnum; + + if ((fl = fopen (alphafile, "r")) == NULL) + { perror (alphafile); C_EXIT (1); } + + alpha = (double *)my_malloc(sizeof(double)*totdoc); + if(verbosity>=1) { + C_PRINTF("Reading alphas..."); C_FFLUSH(stdout); + } + dnum=0; + while((!feof(fl)) && fscanf(fl,"%lf\n",&alpha[dnum]) && (dnum=1) { + C_PRINTF("done\n"); C_FFLUSH(stdout); + } + + return(alpha); +} + +void nol_ll(char *file, long int *nol, long int *wol, long int *ll) + /* Grep through file and count number of lines, maximum number of + spaces per line, and longest line. 
*/
+{
+  FILE *fl;
+  int ic;
+  char c;
+  long current_length,current_wol;
+
+  if ((fl = fopen (file, "r")) == NULL)
+  { perror (file); C_EXIT (1); }
+  current_length=0;
+  current_wol=0;
+  (*ll)=0;
+  (*nol)=1;
+  (*wol)=0;
+  while((ic=getc(fl)) != EOF) {
+    c=(char)ic;
+    current_length++;
+    if(space_or_null((int)c)) {
+      current_wol++;
+    }
+    if(c == '\n') {
+      (*nol)++;
+      if(current_length>(*ll)) {
+        (*ll)=current_length;
+      }
+      if(current_wol>(*wol)) {
+        (*wol)=current_wol;
+      }
+      current_length=0;
+      current_wol=0;
+    }
+  }
+  fclose(fl);
+}
+
+long minl(long int a, long int b)
+{
+  if(a<b)
+    return(a);
+  else
+    return(b);
+}
+
+long maxl(long int a, long int b)
+{
+  if(a>b)
+    return(a);
+  else
+    return(b);
+}
+
+long get_runtime(void)
+{
+  clock_t start;
+  start = clock();
+  return((long)((double)start*100.0/(double)CLOCKS_PER_SEC));
+}
+
+
+# ifdef _MSC_VER
+
+int isnan(double a)
+{
+  return(_isnan(a));
+}
+
+# endif
+
+int space_or_null(int c) {
+  if (c==0)
+    return 1;
+  return isspace((unsigned char)c);
+}
+
+void *my_malloc(size_t size)
+{
+  void *ptr;
+  if(size<=0) size=1; /* for AIX compatibility */
+  ptr=(void *)malloc(size);
+  if(!ptr) {
+    perror ("Out of memory!\n");
+    C_EXIT (1);
+  }
+  return(ptr);
+}
+
+void copyright_notice(void)
+{
+  C_PRINTF("\nCopyright: Thorsten Joachims, thorsten@joachims.org\n\n");
+  C_PRINTF("This software is available for non-commercial use only. It must not\n");
+  C_PRINTF("be modified and distributed without prior permission of the author.\n");
+  C_PRINTF("The author is not responsible for implications from the use of this\n");
+  C_PRINTF("software.\n\n");
+}
diff --git a/src/svmlight/svm_hideo.c b/src/svmlight/svm_hideo.c new file mode 100644 index 00000000..aa0056b3 --- /dev/null +++ b/src/svmlight/svm_hideo.c @@ -0,0 +1,1064 @@ +/***********************************************************************/
+/* */
+/* svm_hideo.c */
+/* */
+/* The Hildreth and D'Espo solver specialized for SVMs. */
+/* */
+/* Author: Thorsten Joachims */
+/* Date: 02.07.02 */
+/* */
+/* Copyright (c) 2002 Thorsten Joachims - All rights reserved */
+/* */
+/* This software is available for non-commercial use only. It must */
+/* not be modified and distributed without prior permission of the */
+/* author. The author is not responsible for implications from the */
+/* use of this software.
+/*                                                                     */
+/***********************************************************************/
+
+# include <math.h>
+# include "svm_common.h"
+
+#include "utils/cutils.h"
+
+/*
+  solve the quadratic programming problem
+
+  minimize   g0 * x + 1/2 x' * G * x
+  subject to ce*x = ce0
+             l <= x <= u
+
+  The linear constraint vector ce can only have -1/+1 as entries
+*/
+
+/* Common Block Declarations */
+
+long verbosity;
+
+# define PRIMAL_OPTIMAL      1
+# define DUAL_OPTIMAL        2
+# define MAXITER_EXCEEDED    3
+# define NAN_SOLUTION        4
+# define ONLY_ONE_VARIABLE   5
+
+# define LARGEROUND          0
+# define SMALLROUND          1
+
+/* /////////////////////////////////////////////////////////////// */
+
+# define DEF_PRECISION          1E-5
+# define DEF_MAX_ITERATIONS     200
+# define DEF_LINDEP_SENSITIVITY 1E-8
+# define EPSILON_HIDEO          1E-20
+# define EPSILON_EQ             1E-5
+
+double *optimize_qp(QP *, double *, long, double *, LEARN_PARM *);
+double *primal=0,*dual=0;
+long   precision_violations=0;
+double opt_precision=DEF_PRECISION;
+long   maxiter=DEF_MAX_ITERATIONS;
+double lindep_sensitivity=DEF_LINDEP_SENSITIVITY;
+double *buffer;
+long   *nonoptimal;
+
+long  smallroundcount=0;
+long  roundnumber=0;
+
+/* /////////////////////////////////////////////////////////////// */
+
+void *my_malloc();
+
+int optimize_hildreth_despo(long,long,double,double,double,long,long,long,double,double *,
+			    double *,double *,double *,double *,double *,
+			    double *,double *,double *,long *,double *,double *);
+int solve_dual(long,long,double,double,long,double *,double *,double *,
+	       double *,double *,double *,double *,double *,double *,
+	       double *,double *,double *,double *,long);
+
+void linvert_matrix(double *, long, double *, double, long *);
+void lprint_matrix(double *, long);
+void ladd_matrix(double *, long, double);
+void lcopy_matrix(double *, long, double *);
+void lswitch_rows_matrix(double *, long, long, long);
+void lswitchrk_matrix(double *, long, long, long);
+
+double calculate_qp_objective(long, double *, double *, double *);
+
+
+
+double *optimize_qp(qp,epsilon_crit,nx,threshold,learn_parm)
+QP *qp;
+double *epsilon_crit;
+long nx;     /* Maximum number of variables in QP */
+double *threshold;
+LEARN_PARM *learn_parm;
+/* start the optimizer and return the optimal values */
+/* The HIDEO optimizer does not necessarily fully solve the problem. */
+/* Since it requires a strictly positive definite hessian, the solution */
+/* is restricted to a linear independent subset in case the matrix is */
+/* only semi-definite. */
+{
+  long i,j;
+  int result;
+  double eq,progress;
+
+  roundnumber++;
+
+  if(!primal) { /* allocate memory at first call */
+    primal=(double *)my_malloc(sizeof(double)*nx);
+    dual=(double *)my_malloc(sizeof(double)*((nx+1)*2));
+    nonoptimal=(long *)my_malloc(sizeof(long)*(nx));
+    buffer=(double *)my_malloc(sizeof(double)*((nx+1)*2*(nx+1)*2+
+					       nx*nx+2*(nx+1)*2+2*nx+1+2*nx+
+					       nx+nx+nx*nx));
+    (*threshold)=0;
+    for(i=0;i<nx;i++) {
+      primal[i]=0;
+    }
+  }
+
+  if(verbosity>=4) { /* really verbose */
+    C_PRINTF("\n\n");
+    eq=qp->opt_ce0[0];
+    for(i=0;i<qp->opt_n;i++) {
+      eq+=qp->opt_xinit[i]*qp->opt_ce[i];
+      C_PRINTF("%f: ",qp->opt_g0[i]);
+      for(j=0;j<qp->opt_n;j++) {
+	C_PRINTF("%f ",qp->opt_g[i*qp->opt_n+j]);
+      }
+      C_PRINTF(": a=%.10f < %f",qp->opt_xinit[i],qp->opt_up[i]);
+      C_PRINTF(": y=%f\n",qp->opt_ce[i]);
+    }
+    if(qp->opt_m) {
+      C_PRINTF("EQ: %f*x0",qp->opt_ce[0]);
+      for(i=1;i<qp->opt_n;i++) {
+	C_PRINTF(" + %f*x%ld",qp->opt_ce[i],i);
+      }
+      C_PRINTF(" = %f\n\n",-qp->opt_ce0[0]);
+    }
+  }
+
+  result=optimize_hildreth_despo(qp->opt_n,qp->opt_m,
+				 opt_precision,(*epsilon_crit),
+				 learn_parm->epsilon_a,maxiter,
+				 /* (long)PRIMAL_OPTIMAL, */
+				 (long)0, (long)0,
+				 lindep_sensitivity,
+				 qp->opt_g,qp->opt_g0,qp->opt_ce,qp->opt_ce0,
+				 qp->opt_low,qp->opt_up,primal,qp->opt_xinit,
+				 dual,nonoptimal,buffer,&progress);
+  if(verbosity>=3) {
+    C_PRINTF("return(%d)...",result);
+  }
+
+  if(learn_parm->totwords < learn_parm->svm_maxqpsize) {
+    /* larger working sets will be linear dependent anyway */
+    learn_parm->svm_maxqpsize=maxl(learn_parm->totwords,(long)2);
+  }
+
+  if(result == NAN_SOLUTION) {
+    lindep_sensitivity*=2;  /* throw out linear dependent examples more */
+			    /* generously */
+    if(learn_parm->svm_maxqpsize>2) {
+      learn_parm->svm_maxqpsize--;  /* decrease size of qp-subproblems */
+    }
+    precision_violations++;
+  }
+
+  /* take one round of only two variable to get unstuck */
+  if((result != PRIMAL_OPTIMAL) || (!(roundnumber % 31)) || (progress <= 0)) {
+
+    smallroundcount++;
+
+    result=optimize_hildreth_despo(qp->opt_n,qp->opt_m,
+				   opt_precision,(*epsilon_crit),
+				   learn_parm->epsilon_a,(long)maxiter,
+				   (long)PRIMAL_OPTIMAL,(long)SMALLROUND,
+				   lindep_sensitivity,
+				   qp->opt_g,qp->opt_g0,qp->opt_ce,qp->opt_ce0,
+				   qp->opt_low,qp->opt_up,primal,qp->opt_xinit,
+				   dual,nonoptimal,buffer,&progress);
+    if(verbosity>=3) {
+      C_PRINTF("return_srd(%d)...",result);
+    }
+
+    if(result != PRIMAL_OPTIMAL) {
+      if(result != ONLY_ONE_VARIABLE)
+	precision_violations++;
+      if(result == MAXITER_EXCEEDED)
+	maxiter+=100;
+      if(result == NAN_SOLUTION) {
+	lindep_sensitivity*=2;  /* throw out linear dependent examples more */
+				/* generously */
+	/* results not valid, so return inital values */
+	for(i=0;i<qp->opt_n;i++) {
+	  primal[i]=qp->opt_xinit[i];
+	}
+      }
+    }
+  }
+
+
+  if(precision_violations > 50) {
+    precision_violations=0;
+    (*epsilon_crit)*=10.0;
+    if(verbosity>=1) {
+      C_PRINTF("\nWARNING: Relaxing epsilon on KT-Conditions (%f).\n",
+	       (*epsilon_crit));
+    }
+  }
+
+  if((qp->opt_m>0) && (result != NAN_SOLUTION) && (!isnan(dual[1]-dual[0])))
+    (*threshold)=dual[1]-dual[0];
+  else
+    (*threshold)=0;
+
+  if(verbosity>=4) { /* really verbose */
+    C_PRINTF("\n\n");
+    eq=qp->opt_ce0[0];
+    for(i=0;i<qp->opt_n;i++) {
+      eq+=primal[i]*qp->opt_ce[i];
+      C_PRINTF("%f: ",qp->opt_g0[i]);
+      for(j=0;j<qp->opt_n;j++) {
+	C_PRINTF("%f ",qp->opt_g[i*qp->opt_n+j]);
+      }
+      C_PRINTF(": a=%.30f",primal[i]);
+      C_PRINTF(": nonopti=%ld",nonoptimal[i]);
+      C_PRINTF(": y=%f\n",qp->opt_ce[i]);
+    }
+    C_PRINTF("eq-constraint=%.30f\n",eq);
+    C_PRINTF("b=%f\n",(*threshold));
+    C_PRINTF(" smallroundcount=%ld ",smallroundcount);
+  }
+
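+  /* primal[] now holds the (possibly only approximately optimized)
+     alpha values for the working set, and the threshold returned via
+     *threshold was read off the two equality-constraint duals
+     (dual[1]-dual[0]) above; both are handed back to the caller. */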
+  return(primal);
+}
+
+
+
+int optimize_hildreth_despo(n,m,precision,epsilon_crit,epsilon_a,maxiter,goal,
+			    smallround,lindep_sensitivity,g,g0,ce,ce0,low,up,
+			    primal,init,dual,lin_dependent,buffer,progress)
+     long   n;            /* number of variables */
+     long   m;            /* number of linear equality constraints [0,1] */
+     double precision;    /* solve at least to this dual precision */
+     double epsilon_crit; /* stop, if KT-Conditions approx fulfilled */
+     double epsilon_a;    /* precision of alphas at bounds */
+     long   maxiter;      /* stop after this many iterations */
+     long   goal;         /* keep going until goal fulfilled */
+     long   smallround;   /* use only two variables of steepest descent */
+     double lindep_sensitivity; /* epsilon for detecting linear dependent ex */
+     double *g;           /* hessian of objective */
+     double *g0;          /* linear part of objective */
+     double *ce,*ce0;     /* linear equality constraints */
+     double *low,*up;     /* box constraints */
+     double *primal;      /* primal variables */
+     double *init;        /* initial values of primal */
+     double *dual;        /* dual variables */
+     long   *lin_dependent;
+     double *buffer;
+     double *progress;    /* delta in the objective function between
+			     before and after */
+{
+  long i,j,k,from,to,n_indep,changed;
+  double sum,bmin=0,bmax=0;
+  double *d,*d0,*ig,*dual_old,*temp,*start;
+  double *g0_new,*g_new,*ce_new,*ce0_new,*low_new,*up_new;
+  double add,t;
+  int result;
+  double obj_before,obj_after;
+  long b1,b2;
+  double g0_b1,g0_b2,ce0_b;
+
+  g0_new=&(buffer[0]);    /* claim regions of buffer */
+  d=&(buffer[n]);
+  d0=&(buffer[n+(n+m)*2*(n+m)*2]);
+  ce_new=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2]);
+  ce0_new=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n]);
+  ig=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m]);
+  dual_old=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n]);
+  low_new=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n+(n+m)*2]);
+  up_new=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n+(n+m)*2+n]);
+  start=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n+(n+m)*2+n+n]);
+  g_new=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n+(n+m)*2+n+n+n]);
+  temp=&(buffer[n+(n+m)*2*(n+m)*2+(n+m)*2+n+m+n*n+(n+m)*2+n+n+n+n*n]);
+
+  b1=-1;
+  b2=-1;
+  for(i=0;i<n;i++) {     /* get variables with steepest feasible descent */
+    sum=g0[i];
+    for(j=0;j<n;j++)
+      sum+=init[j]*g[i*n+j];
+    sum=sum*ce[i];
+    if(((b1==-1) || (sum<bmin))
+       && (!((init[i]<=(low[i]+epsilon_a)) && (ce[i]<0.0)))
+       && (!((init[i]>=( up[i]-epsilon_a)) && (ce[i]>0.0)))
+       ) {
+      bmin=sum;
+      b1=i;
+    }
+    if(((b2==-1) || (sum>=bmax))
+       && (!((init[i]<=(low[i]+epsilon_a)) && (ce[i]>0.0)))
+       && (!((init[i]>=( up[i]-epsilon_a)) && (ce[i]<0.0)))
+       ) {
+      bmax=sum;
+      b2=i;
+    }
+  }
+  /* in case of unbiased hyperplane, the previous projection on */
+  /* equality constraint can lead to b1 or b2 being -1. */
+  if((b1 == -1) || (b2 == -1)) {
+    b1=maxl(b1,b2);
+    b2=maxl(b1,b2);
+  }
+
+  for(i=0;i<n;i++) {
+    start[i]=init[i];
+  }
+
+  /* in case both examples are identical */
+  add=0;
+  changed=0;
+  g0_b1=g0[b1];
+  g0_b2=g0[b2];
+  ce0_b=ce0[0];
+  if((g[b1*n+b2] == g[b1*n+b1]) && (g[b1*n+b2] == g[b2*n+b2])) {
+    /* C_PRINTF("euqal\n"); */
+    if(ce[b1] == ce[b2]) {
+      if(g0_b1 < g0_b2) { /* set b1 to upper bound */
+	/* C_PRINTF("case +=<\n"); */
+	changed=1;
+	t=up[b1]-init[b1];
+	if((init[b2]-low[b2]) < t) {
+	  t=init[b2]-low[b2];
+	}
+	start[b1]=init[b1]+t;
+	start[b2]=init[b2]-t;
+      }
+      else if(g0_b1 > g0_b2) { /* set b2 to upper bound */
+	/* C_PRINTF("case +=>\n"); */
+	changed=1;
+	t=up[b2]-init[b2];
+	if((init[b1]-low[b1]) < t) {
+	  t=init[b1]-low[b1];
+	}
+	start[b1]=init[b1]-t;
+	start[b2]=init[b2]+t;
+      }
+    }
+    else if(((g[b1*n+b1]>0) || (g[b2*n+b2]>0))) { /* (ce[b1] != ce[b2]) */
+      /* C_PRINTF("case +!\n"); */
+      t=((ce[b2]/ce[b1])*g0[b1]-g0[b2]+ce0[0]*(g[b1*n+b1]*ce[b2]/ce[b1]-g[b1*n+b2]/ce[b1]))/((ce[b2]*ce[b2]/(ce[b1]*ce[b1]))*g[b1*n+b1]+g[b2*n+b2]-2*(g[b1*n+b2]*ce[b2]/ce[b1]))-init[b2];
+      changed=1;
+      if((up[b2]-init[b2]) < t) {
+	t=up[b2]-init[b2];
+      }
+      if((init[b2]-low[b2]) < -t) {
+	t=-(init[b2]-low[b2]);
+      }
+      if((up[b1]-init[b1]) < t) {
+	t=(up[b1]-init[b1]);
+      }
+      if((init[b1]-low[b1]) < -t) {
+	t=-(init[b1]-low[b1]);
+      }
+      start[b1]=init[b1]+t;
+      start[b2]=init[b2]+t;
+    }
+  }
+  if((-g[b1*n+b2] == g[b1*n+b1]) && (-g[b1*n+b2] == g[b2*n+b2])) {
+    /* C_PRINTF("diffeuqal\n"); */
+    if(ce[b1] != ce[b2]) {
+      if((g0_b1+g0_b2) < 0) { /* set b1 and b2 to upper bound */
+	/* C_PRINTF("case -!<\n"); */
+	changed=1;
+	t=up[b1]-init[b1];
+	if((up[b2]-init[b2]) < t) {
+	  t=up[b2]-init[b2];
+	}
+	start[b1]=init[b1]+t;
+	start[b2]=init[b2]+t;
+      }
+      else if((g0_b1+g0_b2) >= 0) { /* set b1 and b2 to lower bound */
+	/* C_PRINTF("case -!>\n"); */
+	changed=1;
+	t=init[b1]-low[b1];
+	if((init[b2]-low[b2]) < t) {
+	  t=init[b2]-low[b2];
+	}
+	start[b1]=init[b1]-t;
+	start[b2]=init[b2]-t;
+      }
+    }
+    else if(((g[b1*n+b1]>0) || (g[b2*n+b2]>0))) { /* (ce[b1]==ce[b2]) */
+      /* C_PRINTF("case -=\n"); */
+      t=((ce[b2]/ce[b1])*g0[b1]-g0[b2]+ce0[0]*(g[b1*n+b1]*ce[b2]/ce[b1]-g[b1*n+b2]/ce[b1]))/((ce[b2]*ce[b2]/(ce[b1]*ce[b1]))*g[b1*n+b1]+g[b2*n+b2]-2*(g[b1*n+b2]*ce[b2]/ce[b1]))-init[b2];
+      changed=1;
+      if((up[b2]-init[b2]) < t) {
+	t=up[b2]-init[b2];
+      }
+      if((init[b2]-low[b2]) < -t) {
+	t=-(init[b2]-low[b2]);
+      }
+      if((up[b1]-init[b1]) < -t) {
+	t=-(up[b1]-init[b1]);
+      }
+      if((init[b1]-low[b1]) < t) {
+	t=init[b1]-low[b1];
+      }
+      start[b1]=init[b1]-t;
+      start[b2]=init[b2]+t;
+    }
+  }
+  /* if we have a biased hyperplane, then adding a constant to the */
+  /* hessian does not change the solution. So that is done for examples */
+  /* with zero diagonal entry, since HIDEO cannot handle them. */
+  if((m>0)
+     && ((fabs(g[b1*n+b1]) < lindep_sensitivity)
+	 || (fabs(g[b2*n+b2]) < lindep_sensitivity))) {
+    /* C_PRINTF("Case 0\n"); */
+    add+=0.093274;
+  }
+  /* in case both examples are linear dependent */
+  else if((m>0)
+	  && (g[b1*n+b2] != 0 && g[b2*n+b2] != 0)
+	  && (fabs(g[b1*n+b1]/g[b1*n+b2] - g[b1*n+b2]/g[b2*n+b2])
+	      < lindep_sensitivity)) {
+    /* C_PRINTF("Case lindep\n"); */
+    add+=0.078274;
+  }
+
+  /* special case for zero diagonal entry on unbiased hyperplane */
+  if((m==0) && (b1>=0)) {
+    if(fabs(g[b1*n+b1]) < lindep_sensitivity) {
+      /* C_PRINTF("Case 0b1\n"); */
+      for(i=0;i=0)
+	start[b1]=low[b1];
+    }
+  }
+  if((m==0) && (b2>=0)) {
+    if(fabs(g[b2*n+b2]) < lindep_sensitivity) {
+      /* C_PRINTF("Case 0b2\n"); */
+      for(i=0;i=0)
+	start[b2]=low[b2];
+    }
+  }
+
+  /* C_PRINTF("b1=%ld,b2=%ld\n",b1,b2); */
+
+  lcopy_matrix(g,n,d);
+  if((m==1) && (add>0.0)) {
+    for(j=0;j<n;j++) {
+      for(k=0;k<n;k++) {
+	d[j*n+k]+=add*ce[j]*ce[k];
+      }
+    }
+  }
+  if(n>2) {                    /* switch, so that variables are better mixed */
+    lswitchrk_matrix(d,n,b1,(long)0);
+    if(b2 == 0)
+      lswitchrk_matrix(d,n,b1,(long)1);
+    else
+      lswitchrk_matrix(d,n,b2,(long)1);
+  }
+  if(smallround == SMALLROUND) {
+    for(i=2;i<n;i++) {
+      lin_dependent[i]=1;
+    }
+    if(m>0) { /* for biased hyperplane, pick two variables */
+      lin_dependent[0]=0;
+      lin_dependent[1]=0;
+    }
+    else {    /* for unbiased hyperplane, pick only one variable */
+      lin_dependent[0]=smallroundcount % 2;
+      lin_dependent[1]=(smallroundcount+1) % 2;
+    }
+  }
+  else {
+    for(i=0;i<n;i++) {
+      lin_dependent[i]=0;
+    }
+  }
+  linvert_matrix(d,n,ig,lindep_sensitivity,lin_dependent);
+  if(n>2) {                    /* now switch back */
+    if(b2 == 0) {
+      lswitchrk_matrix(ig,n,b1,(long)1);
+      i=lin_dependent[1];
+      lin_dependent[1]=lin_dependent[b1];
+      lin_dependent[b1]=i;
+    }
+    else {
+      lswitchrk_matrix(ig,n,b2,(long)1);
+      i=lin_dependent[1];
+      lin_dependent[1]=lin_dependent[b2];
+      lin_dependent[b2]=i;
+    }
+    lswitchrk_matrix(ig,n,b1,(long)0);
+    i=lin_dependent[0];
+    lin_dependent[0]=lin_dependent[b1];
+    lin_dependent[b1]=i;
+  }
+  /* lprint_matrix(d,n); */
+  /* lprint_matrix(ig,n); */
+
+  lcopy_matrix(g,n,g_new);   /* restore g_new matrix */
+  if(add>0)
+    for(j=0;j<n;j++) {
+      for(k=0;k<n;k++) {
+	g_new[j*n+k]+=add*ce[j]*ce[k];
+      }
+    }
+  for(i=0;i<n;i++) {         /* set up new linear part */
+    g0_new[i]=g0[i];
+  }
+  if(m>0) ce0_new[0]=-ce0[0];
+  for(i=0;i<n;i++) {         /* fix linear dependent vectors at start[i] */
+    if(lin_dependent[i]) {
+      for(j=0;j<n;j++) {
+	if(!lin_dependent[j]) {
+	  g0_new[j]+=start[i]*g_new[i*n+j];
+	}
+      }
+      if(m>0) ce0_new[0]-=(start[i]*ce[i]);
+    }
+  }
+  from=0;   /* remove linear dependent vectors */
+  to=0;
+  n_indep=0;
+  for(i=0;i<n;i++) {
+    if(!lin_dependent[i]) {
+      g0_new[n_indep]=g0_new[i];
+      ce_new[n_indep]=ce[i];
+      low_new[n_indep]=low[i];
+      up_new[n_indep]=up[i];
+      primal[n_indep]=start[i];
+      n_indep++;
+    }
+    for(j=0;j<n;j++) {
+      if((!lin_dependent[i]) && (!lin_dependent[j])) {
+	g_new[to]=g_new[from];
+	to++;
+      }
+      from++;
+    }
+  }
+
+  if(verbosity>=3) {
+    C_PRINTF("real_qp_size(%ld)...",n_indep);
+  }
+
+  /* cannot optimize with only one variable */
+  if((n_indep<=1) && (m>0) && (!changed)) {
+    for(i=n-1;i>=0;i--) {
+      primal[i]=init[i];
+    }
+    return((int)ONLY_ONE_VARIABLE);
+  }
+
+  if((!changed) || (n_indep>1)) {
+    result=solve_dual(n_indep,m,precision,epsilon_crit,maxiter,g_new,g0_new,
+		      ce_new,ce0_new,low_new,up_new,primal,d,d0,ig,
+		      dual,dual_old,temp,goal);
+  }
+  else {
+    result=PRIMAL_OPTIMAL;
+  }
+
+  j=n_indep;
+  for(i=n-1;i>=0;i--) {
+    if(!lin_dependent[i]) {
+      j--;
+      primal[i]=primal[j];
+    }
+    else {
+      primal[i]=start[i];  /* leave as is */
+    }
+    temp[i]=primal[i];
+  }
+
+  obj_before=calculate_qp_objective(n,g,g0,init);
+  obj_after=calculate_qp_objective(n,g,g0,primal);
+  (*progress)=obj_before-obj_after;
+  if(verbosity>=3) {
+    C_PRINTF("before(%.30f)...after(%.30f)...result_sd(%d)...",
+	     obj_before,obj_after,result);
+  }
+
+  return((int)result);
+}
+
+
+int solve_dual(n,m,precision,epsilon_crit,maxiter,g,g0,ce,ce0,low,up,primal,
+	       d,d0,ig,dual,dual_old,temp,goal)
+     /* Solves the dual using the method of Hildreth and D'Espo. */
+     /* Can only handle problems with zero or exactly one */
+     /* equality constraints. */
+
+     long   n;            /* number of variables */
+     long   m;            /* number of linear equality constraints */
+     double precision;    /* solve at least to this dual precision */
+     double epsilon_crit; /* stop, if KT-Conditions approx fulfilled */
+     long   maxiter;      /* stop after that many iterations */
+     double *g;
+     double *g0;          /* linear part of objective */
+     double *ce,*ce0;     /* linear equality constraints */
+     double *low,*up;     /* box constraints */
+     double *primal;      /* variables (with initial values) */
+     double *d,*d0,*ig,*dual,*dual_old,*temp;  /* buffer */
+     long   goal;
+{
+  long i,j,k,iter;
+  double sum,w,maxviol,viol,temp1,temp2,isnantest;
+  double model_b,dist;
+  long retrain,maxfaktor,primal_optimal=0,at_bound,scalemaxiter;
+  double epsilon_a=1E-15,epsilon_hideo;
+  double eq;
+
+  if((m<0) || (m>1))
+    perror("SOLVE DUAL: inappropriate number of eq-constrains!");
+
+  /*
+  C_PRINTF("\n");
+  for(i=0;i0) {
+    sum=0;   /* dual hessian for eq constraints */
+    for(j=0;j0) {
+    sum=0;   /* dual linear component for eq constraints */
+    for(j=0;j 0) && (iter < (scalemaxiter*maxfaktor))) {
+    iter++;
+
+    while((maxviol > precision) && (iter < (scalemaxiter*maxfaktor))) {
+      iter++;
+      maxviol=0;
+      for(i=0;i<2*(n+m);i++) {
+	sum=d0[i];
+	for(j=0;j<2*(n+m);j++) {
+	  sum+=d[i*2*(n+m)+j]*dual_old[j];
+	}
+	sum-=d[i*2*(n+m)+i]*dual_old[i];
+	dual[i]=-sum/d[i*2*(n+m)+i];
+	if(dual[i]<0) dual[i]=0;
+
+	viol=fabs(dual[i]-dual_old[i]);
+	if(viol>maxviol)
+	  maxviol=viol;
+	dual_old[i]=dual[i];
+      }
+      /*
+      C_PRINTF("%d) maxviol=%20f precision=%f\n",iter,maxviol,precision);
+      */
+    }
+
+    if(m>0) {
+      for(i=0;i=(up[i])) {
+      primal[i]=up[i];
+    }
+  }
+
+  if(m>0)
+    model_b=dual[n+n+1]-dual[n+n];
+  else
+    model_b=0;
+
+  epsilon_hideo=EPSILON_HIDEO;
+  for(i=0;i<n;i++) {           /* check precision of alphas */
+    dist=-model_b*ce[i];
+    dist+=(g0[i]+1.0);
+    for(j=0;j<i;j++) {
+      dist+=(primal[j]*g[j*n+i]);
+    }
+    for(j=i;j<n;j++) {
+      dist+=(primal[j]*g[i*n+j]);
+    }
+    if((primal[i]<(up[i]-epsilon_hideo)) && (dist<(1.0-epsilon_crit))) {
+      epsilon_hideo=(up[i]-primal[i])*2.0;
+    }
+    else if((primal[i]>(low[i]+epsilon_hideo)) &&(dist>(1.0+epsilon_crit))) {
+      epsilon_hideo=(primal[i]-low[i])*2.0;
+    }
+  }
+  /* C_PRINTF("\nEPSILON_HIDEO=%.30f\n",epsilon_hideo); */
+
+  for(i=0;i<n;i++) {           /* clip alphas to bounds */
+    if(primal[i]<=(low[i]+epsilon_hideo)) {
+      primal[i]=low[i];
+    }
+    else if(primal[i]>=(up[i]-epsilon_hideo)) {
+      primal[i]=up[i];
+    }
+  }
+
+  retrain=0;
+  primal_optimal=1;
+  at_bound=0;
+  for(i=0;(i<n) && (!retrain);i++) {   /* test primal optimality */
+    dist=-model_b*ce[i];
+    dist+=(g0[i]+1.0);
+    for(j=0;j<i;j++) {
+      dist+=(primal[j]*g[j*n+i]);
+    }
+    for(j=i;j<n;j++) {
+      dist+=(primal[j]*g[i*n+j]);
+    }
+    if((primal[i]<(up[i]-epsilon_a)) && (dist < (1.0-epsilon_crit))) {
+      retrain=1;
+      primal_optimal=0;
+    }
+    else if((primal[i]>(low[i]+epsilon_a)) && (dist > (1.0+epsilon_crit))) {
+      retrain=1;
+      primal_optimal=0;
+    }
+    if((primal[i]<=(low[i]+epsilon_a)) || (primal[i]>=(up[i]-epsilon_a))) {
+      at_bound++;
+    }
+    /* C_PRINTF("HIDEOtemp: a[%ld]=%.30f, dist=%.6f, b=%f, at_bound=%ld\n",i,primal[i],dist,model_b,at_bound); */
+  }
+  if(m>0) {
+    eq=-ce0[0];                /* check precision of eq-constraint */
+    for(i=0;i<n;i++) {
+      eq+=(ce[i]*primal[i]);
+    }
+    if(EPSILON_EQ < fabs(eq)) {
+      retrain=1;
+      primal_optimal=0;
+    }
+    for(i=0;i<n;i++) {         /* clip alphas to bounds */
+      if(primal[i]<=(low[i]+epsilon_a)) {
+	primal[i]=low[i];
+      }
+      else if(primal[i]>=(up[i]-epsilon_a)) {
+	primal[i]=up[i];
+      }
+    }
+  }
+
+  isnantest=0;
+  for(i=0;i<n;i++) {           /* check for isnan() */
+    isnantest+=primal[i];
+  }
+
+  if(m>0) {
+    temp1=dual[n+n+1];         /* copy the dual variables for the eq */
+    temp2=dual[n+n];           /* constraints to a handier location */
+    for(i=n+n+1;i>=2;i--) {
+      dual[i]=dual[i-2];
+    }
+    dual[0]=temp2;
+    dual[1]=temp1;
+    isnantest+=temp1+temp2;
+  }
+
+  if(isnan(isnantest)) {
+    return((int)NAN_SOLUTION);
+  }
+  else if(primal_optimal) {
+    return((int)PRIMAL_OPTIMAL);
+  }
+  else if(maxviol == 0.0) {
+    return((int)DUAL_OPTIMAL);
+  }
+  else {
+    return((int)MAXITER_EXCEEDED);
+  }
+}
+
+
+void linvert_matrix(matrix,depth,inverse,lindep_sensitivity,lin_dependent)
+double *matrix;
+long depth;
+double *inverse,lindep_sensitivity;
+long *lin_dependent;  /* indicates the active parts of matrix on
+			 input and output*/
+{
+  long i,j,k;
+  double factor;
+
+  for(i=0;i<depth*depth;i++) {     /* start from the identity matrix */
+    inverse[i]=0.0;
+  }
+  for(i=0;i<depth;i++) {
+    inverse[i*depth+i]=1.0;
+  }
+  for(i=0;i<depth;i++) {           /* forward pass with pivot test */
+    if(lin_dependent[i] || (fabs(matrix[i*depth+i]) < lindep_sensitivity)) {
+      lin_dependent[i]=1;
+    }
+    else {
+      for(j=i+1;j<depth;j++) {
+	factor=matrix[j*depth+i]/matrix[i*depth+i];
+	for(k=0;k<depth;k++) {
+	  matrix[j*depth+k]-=(factor*matrix[i*depth+k]);
+	  inverse[j*depth+k]-=(factor*inverse[i*depth+k]);
+	}
+      }
+    }
+  }
+  for(i=depth-1;i>=0;i--) {        /* backward pass */
+    if(!lin_dependent[i]) {
+      factor=1/matrix[i*depth+i];
+      for(k=0;k<depth;k++) {
+	inverse[i*depth+k]*=factor;
+      }
+      for(j=i-1;j>=0;j--) {
+	factor=matrix[j*depth+i];
+	matrix[j*depth+i]=0;
+	for(k=0;ktotwords=totwords;
+
+  /* make sure -n value is reasonable */
+  if((learn_parm->svm_newvarsinqp < 2)
+     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
+    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
+  }
+
+  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);
+
+  label = (long *)my_malloc(sizeof(long)*totdoc);
+  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
+  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
+  c = (double *)my_malloc(sizeof(double)*totdoc);
+  a = (double *)my_malloc(sizeof(double)*totdoc);
+  a_fullset = (double *)my_malloc(sizeof(double)*totdoc);
+  xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);
+  lin = (double *)my_malloc(sizeof(double)*totdoc);
+  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
+  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
+  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
+  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));
+
+  model->at_upper_bound=0;
+  model->b=0;
+  model->supvec[0]=0;  /* element 0 reserved and empty for now */
+  model->alpha[0]=0;
+  model->lin_weights=NULL;
+  model->totwords=totwords;
+  model->totdoc=totdoc;
+  model->kernel_parm=(*kernel_parm);
+  model->sv_num=1;
+  model->loo_error=-1;
+  model->loo_recall=-1;
+  model->loo_precision=-1;
+  model->xa_error=-1;
+  model->xa_recall=-1;
+  model->xa_precision=-1;
+  inconsistentnum=0;
+  transduction=0;
+
+  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
+  r_delta_sq=r_delta*r_delta;
+
+  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
+  if(learn_parm->svm_c == 0.0) {  /* default value for C */
+    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
+    if(verbosity>=1)
+      C_PRINTF("Setting default regularization parameter C=%.4f\n",
+	       learn_parm->svm_c);
+  }
+
+  learn_parm->eps=-1.0;      /* equivalent regression epsilon for
+				classification */
+
+  for(i=0;i<totdoc;i++) {    /* various inits */
+    docs[i]->docnum=i;
+    inconsistent[i]=0;
+    a[i]=0;
+    lin[i]=0;
+    c[i]=0.0;
+    unlabeled[i]=0;
+    if(class[i] == 0) {
+      unlabeled[i]=1;
+      label[i]=0;
+      transduction=1;
+    }
+    if(class[i] > 0) {
+      learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
+	docs[i]->costfactor;
+      label[i]=1;
+      trainpos++;
+    }
+    else if(class[i] < 0) {
+      learn_parm->svm_cost[i]=learn_parm->svm_c*docs[i]->costfactor;
+      label[i]=-1;
+      trainneg++;
+    }
+    else {
+      learn_parm->svm_cost[i]=0;
+    }
+  }
+  if(verbosity>=2) {
+    C_PRINTF("%ld positive, %ld negative, and %ld unlabeled examples.\n",trainpos,trainneg,totdoc-trainpos-trainneg); C_FFLUSH(stdout);
+    C_PRINTF("transductive=%ld, ", transduction);
+  }
+
+  /* caching makes no sense for linear kernel */
+  if(kernel_parm->kernel_type == LINEAR) {
+    kernel_cache = NULL;
+  }
+
+  /* compute starting state for initial alpha values */
+  if(alpha) {
+    if(verbosity>=1) {
+      C_PRINTF("Computing starting state..."); C_FFLUSH(stdout);
+    }
+    index = (long *)my_malloc(sizeof(long)*totdoc);
+    index2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
+    weights=(double *)my_malloc(sizeof(double)*(totwords+1));
+    aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc);
+    for(i=0;i<totdoc;i++) {  /* create full index and clip alphas */
+      index[i]=1;
+      alpha[i]=fabs(alpha[i]);
+      if(alpha[i]<0) alpha[i]=0;
+      if(alpha[i]>learn_parm->svm_cost[i]) alpha[i]=learn_parm->svm_cost[i];
+    }
+    if(kernel_parm->kernel_type != LINEAR) {
+      for(i=0;i<totdoc;i++)  /* fill kernel cache with unbounded SV */
+	if((alpha[i]>0) && (alpha[i]<learn_parm->svm_cost[i])
+	   && (kernel_cache_space_available(kernel_cache)))
+	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
+      for(i=0;i<totdoc;i++)  /* fill rest of kernel cache with bounded SV */
+	if((alpha[i]==learn_parm->svm_cost[i])
+	   && (kernel_cache_space_available(kernel_cache)))
+	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
+    }
+    (void)compute_index(index,totdoc,index2dnum);
+    update_linear_component(docs,label,index2dnum,alpha,a,index2dnum,totdoc,
+			    totwords,kernel_parm,kernel_cache,lin,aicache,
+			    weights);
+    (void)calculate_svm_model(docs,label,unlabeled,lin,alpha,a,c,
+			      learn_parm,index2dnum,index2dnum,model);
+    for(i=0;i<totdoc;i++) {    /* copy initial alphas */
+      a[i]=alpha[i];
+    }
+    free(index);
+    free(index2dnum);
+    free(weights);
+    free(aicache);
+    if(verbosity>=1) {
+      C_PRINTF("done.\n"); C_FFLUSH(stdout);
+    }
+  }
+
+  if(transduction) {
+    learn_parm->svm_iter_to_shrink=99999999;
+    if(verbosity >= 1)
+      C_PRINTF("\nDeactivating Shrinking due to an incompatibility with the transductive \nlearner in the current version.\n\n");
+  }
+
+  if(transduction && learn_parm->compute_loo) {
+    learn_parm->compute_loo=0;
+    if(verbosity >= 1)
+      C_PRINTF("\nCannot compute leave-one-out estimates for transductive learner.\n\n");
+  }
+
+  if(learn_parm->remove_inconsistent && learn_parm->compute_loo) {
+    learn_parm->compute_loo=0;
+    C_PRINTF("\nCannot compute leave-one-out estimates when removing inconsistent examples.\n\n");
+  }
+
+  if(learn_parm->compute_loo && ((trainpos == 1) || (trainneg == 1))) {
+    learn_parm->compute_loo=0;
+    C_PRINTF("\nCannot compute leave-one-out with only one example in one class.\n\n");
+  }
+
+
+  if(verbosity==1) {
+    C_PRINTF("Optimizing"); C_FFLUSH(stdout);
+  }
+
+  /* train the svm */
+  iterations=optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
+				     kernel_parm,kernel_cache,&shrink_state,model,
+				     inconsistent,unlabeled,a,lin,
+				     c,&timing_profile,
+				     &maxdiff,(long)-1,
+				     (long)1);
+
+  learn_parm->iterations=iterations;
+
+  if(verbosity>=1) {
+    if(verbosity==1) C_PRINTF("done. (%ld iterations)\n",iterations);
+
+    misclassified=0;
+    for(i=0;(i<totdoc);i++) { /* get final statistic */
+      if((lin[i]-model->b)*(double)label[i] <= 0.0)
+	misclassified++;
+    }
+
+    C_PRINTF("Optimization finished (%ld misclassified, maxdiff=%.5f).\n",
+	     misclassified,maxdiff);
+
+    runtime_end=get_runtime();
+    if(verbosity>=2) {
+      C_PRINTF("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
+	       ((float)runtime_end-(float)runtime_start)/100.0,
+	       (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
+    }
+    else {
+      C_PRINTF("Runtime in cpu-seconds: %.2f\n",
+	       (runtime_end-runtime_start)/100.0);
+    }
+
+    if(learn_parm->remove_inconsistent) {
+      inconsistentnum=0;
+      for(i=0;i<totdoc;i++)
+	if(inconsistent[i])
+	  inconsistentnum++;
+      C_PRINTF("Number of SV: %ld (plus %ld inconsistent examples)\n",
+	       model->sv_num-1,inconsistentnum);
+    }
+    else {
+      upsupvecnum=0;
+      for(i=1;i<model->sv_num;i++) {
+	if(fabs(model->alpha[i]) >=
+	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
+	    learn_parm->epsilon_a))
+	  upsupvecnum++;
+      }
+      C_PRINTF("Number of SV: %ld (including %ld at upper bound)\n",
+	       model->sv_num-1,upsupvecnum);
+    }
+
+    if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
+      loss=0;
+      model_length=0;
+      for(i=0;i<totdoc;i++) {
+	if((lin[i]-model->b)*(double)label[i] < 1.0-learn_parm->epsilon_crit)
+	  loss+=1.0-(lin[i]-model->b)*(double)label[i];
+	model_length+=a[i]*label[i]*lin[i];
+      }
+      model_length=sqrt(model_length);
+      C_FPRINTF(stdout,"L1 loss: loss=%.5f\n",loss);
+      C_FPRINTF(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
+      example_length=estimate_sphere(model,kernel_parm);
+      C_FPRINTF(stdout,"Norm of longest example vector: |x|=%.5f\n",
+		length_of_longest_document_vector(docs,totdoc,kernel_parm));
+      C_FPRINTF(stdout,"Estimated VCdim of classifier: VCdim<=%.5f\n",
+		estimate_margin_vcdim(model,model_length,example_length,
+				      kernel_parm));
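+      /* The XiAlpha-estimates computed next bound the leave-one-out
+         error/recall/precision from the alphas and slacks of the single
+         training run above (controlled by learn_parm->rho and
+         learn_parm->xa_depth); they are only meaningful without
+         inconsistency removal and transduction, hence the guard below. */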
+      if((!learn_parm->remove_inconsistent) && (!transduction)) {
+	runtime_start_xa=get_runtime();
+	if(verbosity>=1) {
+	  C_PRINTF("Computing XiAlpha-estimates..."); C_FFLUSH(stdout);
+	}
+	compute_xa_estimates(model,label,unlabeled,totdoc,docs,lin,a,
+			     kernel_parm,learn_parm,&(model->xa_error),
+			     &(model->xa_recall),&(model->xa_precision));
+	if(verbosity>=1) {
+	  C_PRINTF("done\n");
+	}
+	C_PRINTF("Runtime for XiAlpha-estimates in cpu-seconds: %.2f\n",
+		 (get_runtime()-runtime_start_xa)/100.0);
+
+	C_FPRINTF(stdout,"XiAlpha-estimate of the error: error<=%.2f%% (rho=%.2f,depth=%ld)\n",
+		  model->xa_error,learn_parm->rho,learn_parm->xa_depth);
+	C_FPRINTF(stdout,"XiAlpha-estimate of the recall: recall=>%.2f%% (rho=%.2f,depth=%ld)\n",
+		  model->xa_recall,learn_parm->rho,learn_parm->xa_depth);
+	C_FPRINTF(stdout,"XiAlpha-estimate of the precision: precision=>%.2f%% (rho=%.2f,depth=%ld)\n",
+		  model->xa_precision,learn_parm->rho,learn_parm->xa_depth);
+      }
+      else if(!learn_parm->remove_inconsistent) {
+	estimate_transduction_quality(model,label,unlabeled,totdoc,docs,lin);
+      }
+    }
+    if(verbosity>=1) {
+      C_PRINTF("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
+    }
+  }
+
+
+  /* leave-one-out testing starts now */
+  if(learn_parm->compute_loo) {
+    /* save results of training on full dataset for leave-one-out */
+    runtime_start_loo=get_runtime();
+    for(i=0;i<totdoc;i++) {
+      xi_fullset[i]=1.0-((lin[i]-model->b)*(double)label[i]);
+      if(xi_fullset[i]<0) xi_fullset[i]=0;
+      a_fullset[i]=a[i];
+    }
+    if(verbosity>=1) {
+      C_PRINTF("Computing leave-one-out");
+    }
+
+    /* repeat this loop for every held-out example */
+    for(heldout=0;(heldout<totdoc);heldout++) {
+      if(learn_parm->rho*a_fullset[heldout]*r_delta_sq+xi_fullset[heldout]
+	 < 1.0) {
+	/* guaranteed to not produce a leave-one-out error */
+	if(verbosity==1) {
+	  C_PRINTF("+"); C_FFLUSH(stdout);
+	}
+      }
+      else if(xi_fullset[heldout] > 1.0) {
+	/* guaranteed to produce a leave-one-out error */
+	loo_count++;
+	if(label[heldout] > 0) loo_count_pos++; else loo_count_neg++;
+	if(verbosity==1) {
+	  C_PRINTF("-"); C_FFLUSH(stdout);
+	}
+      }
+      else {
+	loocomputed++;
+	heldout_c=learn_parm->svm_cost[heldout]; /* set upper bound to zero */
+	learn_parm->svm_cost[heldout]=0;
+	/* make sure heldout example is not currently */
+	/* shrunk away. Assumes that lin is up to date! */
+	shrink_state.active[heldout]=1;
+	if(verbosity>=2)
+	  C_PRINTF("\nLeave-One-Out test on example %ld\n",heldout);
+	if(verbosity>=1) {
+	  C_PRINTF("(?[%ld]",heldout); C_FFLUSH(stdout);
+	}
+
+	optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
+				kernel_parm,
+				kernel_cache,&shrink_state,model,inconsistent,unlabeled,
+				a,lin,c,&timing_profile,
+				&maxdiff,heldout,(long)2);
+
+	/* C_PRINTF("%.20f\n",(lin[heldout]-model->b)*(double)label[heldout]); */
+
+	if(((lin[heldout]-model->b)*(double)label[heldout]) <= 0.0) {
+	  loo_count++;                           /* there was a loo-error */
+	  if(label[heldout] > 0) loo_count_pos++; else loo_count_neg++;
+	  if(verbosity>=1) {
+	    C_PRINTF("-)"); C_FFLUSH(stdout);
+	  }
+	}
+	else {
+	  if(verbosity>=1) {
+	    C_PRINTF("+)"); C_FFLUSH(stdout);
+	  }
+	}
+	/* now we need to restore the original data set*/
+	learn_parm->svm_cost[heldout]=heldout_c; /* restore upper bound */
+      }
+    } /* end of leave-one-out loop */
+
+
+    if(verbosity>=1) {
+      C_PRINTF("\nRetrain on full problem"); C_FFLUSH(stdout);
+    }
+    optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
+			    kernel_parm,
+			    kernel_cache,&shrink_state,model,inconsistent,unlabeled,
+			    a,lin,c,&timing_profile,
+			    &maxdiff,(long)-1,(long)1);
+    if(verbosity >= 1)
+      C_PRINTF("done.\n");
+
+
+    /* after all leave-one-out computed */
+    model->loo_error=100.0*loo_count/(double)totdoc;
+    model->loo_recall=(1.0-(double)loo_count_pos/(double)trainpos)*100.0;
+    model->loo_precision=(trainpos-loo_count_pos)/
+      (double)(trainpos-loo_count_pos+loo_count_neg)*100.0;
+    if(verbosity >= 1) {
+      C_FPRINTF(stdout,"Leave-one-out estimate of the error: error=%.2f%%\n",
+		model->loo_error);
+      C_FPRINTF(stdout,"Leave-one-out estimate of the recall: recall=%.2f%%\n",
+		model->loo_recall);
+      C_FPRINTF(stdout,"Leave-one-out estimate of the precision: precision=%.2f%%\n",
+		model->loo_precision);
+      C_FPRINTF(stdout,"Actual leave-one-outs computed: %ld (rho=%.2f)\n",
+		loocomputed,learn_parm->rho);
+      C_PRINTF("Runtime for leave-one-out in cpu-seconds: %.2f\n",
+	       (double)(get_runtime()-runtime_start_loo)/100.0);
+    }
+  }
+
+  if(learn_parm->alphafile[0])
+    write_alphas(learn_parm->alphafile,a,label,totdoc);
+
+  shrink_state_cleanup(&shrink_state);
+  free(label);
+  free(inconsistent);
+  free(unlabeled);
+  free(c);
+  free(a);
+  free(a_fullset);
+  free(xi_fullset);
+  free(lin);
+  free(learn_parm->svm_cost);
+}
+
+
+/* Learns an SVM regression model based on the training data in
+   docs/label. The resulting model is returned in the structure
+   model. */
+
+void svm_learn_regression(DOC **docs, double *value, long int totdoc,
+			  long int totwords, LEARN_PARM *learn_parm,
+			  KERNEL_PARM *kernel_parm,
+			  KERNEL_CACHE **kernel_cache, MODEL *model)
+     /* docs:        Training vectors (x-part) */
+     /* class:       Training value (y-part) */
+     /* totdoc:      Number of examples in docs/label */
+     /* totwords:    Number of features (i.e. highest feature index) */
+     /* learn_parm:  Learning paramenters */
+     /* kernel_parm: Kernel paramenters */
+     /* kernel_cache:Initialized Cache, if using a kernel. NULL if
+                     linear.
+                     Note that it will be free'd and reassigned */
+     /* model:       Returns learning result (assumed empty before called) */
+{
+  long *inconsistent,i,j;
+  long inconsistentnum;
+  long upsupvecnum;
+  double loss,model_length,example_length;
+  double maxdiff,*lin,*a,*c;
+  long runtime_start,runtime_end;
+  long iterations,kernel_cache_size;
+  long *unlabeled;
+  double r_delta_sq=0,r_delta,r_delta_avg;
+  double *xi_fullset;  /* buffer for storing xi on full sample in loo */
+  double *a_fullset;   /* buffer for storing alpha on full sample in loo */
+  TIMING timing_profile;
+  SHRINK_STATE shrink_state;
+  DOC **docs_org;
+  long *label;
+
+  /* set up regression problem in standard form */
+  docs_org=docs;
+  docs = (DOC **)my_malloc(sizeof(DOC)*2*totdoc);
+  label = (long *)my_malloc(sizeof(long)*2*totdoc);
+  c = (double *)my_malloc(sizeof(double)*2*totdoc);
+  for(i=0;i<totdoc;i++) {
+    j=2*totdoc-1-i;
+    docs[i]=create_example(i,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
+    label[i]=+1;
+    c[i]=value[i];
+    docs[j]=create_example(j,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
+    label[j]=-1;
+    c[j]=value[i];
+  }
+  totdoc*=2;
+
+  /* need to get a bigger kernel cache */
+  if(*kernel_cache) {
+    kernel_cache_size=(*kernel_cache)->buffsize*sizeof(CFLOAT)/(1024*1024);
+    kernel_cache_cleanup(*kernel_cache);
+    (*kernel_cache)=kernel_cache_init(totdoc,kernel_cache_size);
+  }
+
+  runtime_start=get_runtime();
+  timing_profile.time_kernel=0;
+  timing_profile.time_opti=0;
+  timing_profile.time_shrink=0;
+  timing_profile.time_update=0;
+  timing_profile.time_model=0;
+  timing_profile.time_check=0;
+  timing_profile.time_select=0;
+  kernel_cache_statistic=0;
+
+  learn_parm->totwords=totwords;
+
+  /* make sure -n value is reasonable */
+  if((learn_parm->svm_newvarsinqp < 2)
+     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
+    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
+  }
+
+  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);
+
+  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
+  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
+  a = (double *)my_malloc(sizeof(double)*totdoc);
+  a_fullset = (double *)my_malloc(sizeof(double)*totdoc);
+  xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);
+  lin = (double *)my_malloc(sizeof(double)*totdoc);
+  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
+  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
+  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
+  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));
+
+  model->at_upper_bound=0;
+  model->b=0;
+  model->supvec[0]=0;  /* element 0 reserved and empty for now */
+  model->alpha[0]=0;
+  model->lin_weights=NULL;
+  model->totwords=totwords;
+  model->totdoc=totdoc;
+  model->kernel_parm=(*kernel_parm);
+  model->sv_num=1;
+  model->loo_error=-1;
+  model->loo_recall=-1;
+  model->loo_precision=-1;
+  model->xa_error=-1;
+  model->xa_recall=-1;
+  model->xa_precision=-1;
+  inconsistentnum=0;
+
+  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
+  r_delta_sq=r_delta*r_delta;
+
+  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
+  if(learn_parm->svm_c == 0.0) {  /* default value for C */
+    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
+    if(verbosity>=1)
+      C_PRINTF("Setting default regularization parameter C=%.4f\n",
+	       learn_parm->svm_c);
+  }
+
+  for(i=0;i<totdoc;i++) {
+    inconsistent[i]=0;
+    a[i]=0;
+    lin[i]=0;
+    unlabeled[i]=0;
+    if(label[i] > 0) {
+      learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
+	docs[i]->costfactor;
+    }
+    else if(label[i] < 0) {
+      learn_parm->svm_cost[i]=learn_parm->svm_c*docs[i]->costfactor;
+    }
+  }
+
+  /* caching makes no sense for linear kernel */
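+  /* (in the classification path above the cache pointer is simply
+     dropped for LINEAR kernels; here the cache is owned by the caller,
+     so the code below only warns instead of discarding it) */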
+  if((kernel_parm->kernel_type == LINEAR) && (*kernel_cache)) {
+    C_PRINTF("WARNING: Using a kernel cache for linear case will slow optimization down!\n");
+  }
+
+  if(verbosity==1) {
+    C_PRINTF("Optimizing"); C_FFLUSH(stdout);
+  }
+
+  /* train the svm */
+  iterations=optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
+				     kernel_parm,*kernel_cache,&shrink_state,
+				     model,inconsistent,unlabeled,a,lin,c,
+				     &timing_profile,&maxdiff,(long)-1,
+				     (long)1);
+  learn_parm->iterations=iterations;
+
+  if(verbosity>=1) {
+    if(verbosity==1) C_PRINTF("done. (%ld iterations)\n",iterations);
+
+    C_PRINTF("Optimization finished (maxdiff=%.5f).\n",maxdiff);
+
+    runtime_end=get_runtime();
+    if(verbosity>=2) {
+      C_PRINTF("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
+	       ((float)runtime_end-(float)runtime_start)/100.0,
+	       (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
+    }
+    else {
+      C_PRINTF("Runtime in cpu-seconds: %.2f\n",
+	       (runtime_end-runtime_start)/100.0);
+    }
+
+    if(learn_parm->remove_inconsistent) {
+      inconsistentnum=0;
+      for(i=0;i<totdoc;i++)
+	if(inconsistent[i])
+	  inconsistentnum++;
+      C_PRINTF("Number of SV: %ld (plus %ld inconsistent examples)\n",
+	       model->sv_num-1,inconsistentnum);
+    }
+    else {
+      upsupvecnum=0;
+      for(i=1;i<model->sv_num;i++) {
+	if(fabs(model->alpha[i]) >=
+	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
+	    learn_parm->epsilon_a))
+	  upsupvecnum++;
+      }
+      C_PRINTF("Number of SV: %ld (including %ld at upper bound)\n",
+	       model->sv_num-1,upsupvecnum);
+    }
+
+    if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
+      loss=0;
+      model_length=0;
+      for(i=0;i<totdoc;i++) {
+	if((lin[i]-model->b)*(double)label[i] < (-learn_parm->eps+(double)label[i]*c[i])-learn_parm->epsilon_crit)
+	  loss+=-learn_parm->eps+(double)label[i]*c[i]-(lin[i]-model->b)*(double)label[i];
+	model_length+=a[i]*label[i]*lin[i];
+      }
+      model_length=sqrt(model_length);
+      C_FPRINTF(stdout,"L1 loss: loss=%.5f\n",loss);
+      C_FPRINTF(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
+      example_length=estimate_sphere(model,kernel_parm);
+      C_FPRINTF(stdout,"Norm of longest example vector: |x|=%.5f\n",
+		length_of_longest_document_vector(docs,totdoc,kernel_parm));
+    }
+    if(verbosity>=1) {
+      C_PRINTF("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
+    }
+  }
+
+  if(learn_parm->alphafile[0])
+    write_alphas(learn_parm->alphafile,a,label,totdoc);
+
+  /* this makes sure the model we return does not contain pointers to the
+     temporary documents */
+  for(i=1;i<model->sv_num;i++) {
+    j=model->supvec[i]->docnum;
+    if(j >= (totdoc/2)) {
+      j=totdoc-j-1;
+    }
+    model->supvec[i]=docs_org[j];
+  }
+
+  shrink_state_cleanup(&shrink_state);
+  for(i=0;i<totdoc;i++)
+    free_example(docs[i],0);
+  free(docs);
+  free(label);
+  free(inconsistent);
+  free(unlabeled);
+  free(c);
+  free(a);
+  free(a_fullset);
+  free(xi_fullset);
+  free(lin);
+  free(learn_parm->svm_cost);
+}
+
+void svm_learn_ranking(DOC **docs, double *rankvalue, long int totdoc,
+		       long int totwords, LEARN_PARM *learn_parm,
+		       KERNEL_PARM *kernel_parm, KERNEL_CACHE **kernel_cache,
+		       MODEL *model)
+     /* docs:        Training vectors (x-part) */
+     /* rankvalue:   Training target values that determine the ranking */
+     /* totdoc:      Number of examples in docs/label */
+     /* totwords:    Number of features (i.e. highest feature index) */
+     /* learn_parm:  Learning paramenters */
+     /* kernel_parm: Kernel paramenters */
+     /* kernel_cache:Initialized pointer to Cache of size 1*totdoc, if
+                     using a kernel. NULL if linear. NOTE: Cache is
+                     getting reinitialized in this function */
+     /* model:       Returns learning result (assumed empty before called) */
+{
+  DOC **docdiff;
+  long i,j,k,totpair,kernel_cache_size;
+  double *target,*alpha,cost;
+  long *greater,*lesser;
+  MODEL *pairmodel;
+  SVECTOR *flow,*fhigh;
+
+  totpair=0;
+  for(i=0;i<totdoc;i++) {
+    for(j=i+1;j<totdoc;j++) {
+      if((docs[i]->queryid==docs[j]->queryid) && (rankvalue[i] != rankvalue[j])) {
+	totpair++;
+      }
+    }
+  }
+
+  C_PRINTF("Constructing %ld rank constraints...",totpair); C_FFLUSH(stdout);
+  docdiff=(DOC **)my_malloc(sizeof(DOC)*totpair);
+  target=(double *)my_malloc(sizeof(double)*totpair);
+  greater=(long *)my_malloc(sizeof(long)*totpair);
+  lesser=(long *)my_malloc(sizeof(long)*totpair);
+
+  k=0;
+  for(i=0;i<totdoc;i++) {
+    for(j=i+1;j<totdoc;j++) {
+      if(docs[i]->queryid == docs[j]->queryid) {
+	cost=(docs[i]->costfactor+docs[j]->costfactor)/2.0;
+	if(rankvalue[i] > rankvalue[j]) {
+	  if(kernel_parm->kernel_type == LINEAR)
+	    docdiff[k]=create_example(k,0,0,cost,
+				      sub_ss(docs[i]->fvec,docs[j]->fvec));
+	  else {
+	    flow=copy_svector(docs[j]->fvec);
+	    flow->factor=-1.0;
+	    flow->next=NULL;
+	    fhigh=copy_svector(docs[i]->fvec);
+	    fhigh->factor=1.0;
+	    fhigh->next=flow;
+	    docdiff[k]=create_example(k,0,0,cost,fhigh);
+	  }
+	  target[k]=1;
+	  greater[k]=i;
+	  lesser[k]=j;
+	  k++;
+	}
+	else if(rankvalue[i] < rankvalue[j]) {
+	  if(kernel_parm->kernel_type == LINEAR)
+	    docdiff[k]=create_example(k,0,0,cost,
+				      sub_ss(docs[i]->fvec,docs[j]->fvec));
+	  else {
+	    flow=copy_svector(docs[j]->fvec);
+	    flow->factor=-1.0;
+	    flow->next=NULL;
+	    fhigh=copy_svector(docs[i]->fvec);
+	    fhigh->factor=1.0;
+	    fhigh->next=flow;
+	    docdiff[k]=create_example(k,0,0,cost,fhigh);
+	  }
+	  target[k]=-1;
+	  greater[k]=i;
+	  lesser[k]=j;
+	  k++;
+	}
+      }
+    }
+  }
+  C_PRINTF("done.\n"); C_FFLUSH(stdout);
+
+  /* need to get a bigger kernel cache */
+  if(*kernel_cache) {
+    kernel_cache_size=(*kernel_cache)->buffsize*sizeof(CFLOAT)/(1024*1024);
+    kernel_cache_cleanup(*kernel_cache);
+    (*kernel_cache)=kernel_cache_init(totpair,kernel_cache_size);
+  }
+
+  /* must use unbiased hyperplane on difference vectors */
+  learn_parm->biased_hyperplane=0;
+  pairmodel=(MODEL *)my_malloc(sizeof(MODEL));
+  svm_learn_classification(docdiff,target,totpair,totwords,learn_parm,
+			   kernel_parm,(*kernel_cache),pairmodel,NULL);
+
+  /* Transfer the result into a more compact model. If you would like
+     to output the original model on pairs of documents, see below. */
+  alpha=(double *)my_malloc(sizeof(double)*totdoc);
+  for(i=0;i<totdoc;i++) {
+    alpha[i]=0;
+  }
+  for(i=1;i<pairmodel->sv_num;i++) {
+    alpha[lesser[(pairmodel->supvec[i])->docnum]]-=pairmodel->alpha[i];
+    alpha[greater[(pairmodel->supvec[i])->docnum]]+=pairmodel->alpha[i];
+  }
+  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
+  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
+  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));
+  model->supvec[0]=0;  /* element 0 reserved and empty for now */
+  model->alpha[0]=0;
+  model->sv_num=1;
+  for(i=0;i<totdoc;i++) {
+    if(alpha[i]) {
+      model->supvec[model->sv_num]=docs[i];
+      model->alpha[model->sv_num]=alpha[i];
+      model->index[i]=model->sv_num;
+      model->sv_num++;
+    }
+    else {
+      model->index[i]=-1;
+    }
+  }
+  model->at_upper_bound=0;
+  model->b=0;
+  model->lin_weights=NULL;
+  model->totwords=totwords;
+  model->totdoc=totdoc;
+  model->kernel_parm=(*kernel_parm);
+  model->loo_error=-1;
+  model->loo_recall=-1;
+  model->loo_precision=-1;
+  model->xa_error=-1;
+  model->xa_recall=-1;
+  model->xa_precision=-1;
+
+  free(alpha);
+  free(greater);
+  free(lesser);
+  free(target);
+
+  /* If you would like to output the original model on pairs of
+     document, replace the following lines with '(*model)=(*pairmodel);' */
+  for(i=0;i<totpair;i++)
+    free_example(docdiff[i],1);
+  free(docdiff);
+  free(pairmodel);
+}
+
+
+/* The following solves a freely defined and given set of
+   inequalities. The optimization problem is of the following form:
+
+   min 0.5 w*w + C sum_i C_i \xi_i
+   s.t. x_i * w > rhs_i - \xi_i
+
+   This corresponds to the -z o option. */
+
+void svm_learn_optimization(DOC **docs, double *rhs, long int
+			    totdoc, long int totwords,
+			    LEARN_PARM *learn_parm,
+			    KERNEL_PARM *kernel_parm,
+			    KERNEL_CACHE *kernel_cache, MODEL *model,
+			    double *alpha)
+     /* docs:        Left-hand side of inequalities (x-part) */
+     /* rhs:         Right-hand side of inequalities */
+     /* totdoc:      Number of examples in docs/label */
+     /* totwords:    Number of features (i.e. highest feature index) */
+     /* learn_parm:  Learning paramenters */
+     /* kernel_parm: Kernel paramenters */
+     /* kernel_cache:Initialized Cache of size 1*totdoc, if using a kernel.
+                     NULL if linear.*/
+     /* model:       Returns solution as SV expansion (assumed empty before called) */
+     /* alpha:       Start values for the alpha variables or NULL
+                     pointer. The new alpha values are returned after
+                     optimization if not NULL. Array must be of size totdoc. */
+{
+  long i,*label;
+  long misclassified,upsupvecnum;
+  double loss,model_length,example_length;
+  double maxdiff,*lin,*a,*c;
+  long runtime_start,runtime_end;
+  long iterations,maxslackid,svsetnum;
+  long *unlabeled,*inconsistent;
+  double r_delta_sq=0,r_delta,r_delta_avg;
+  long *index,*index2dnum;
+  double *weights,*slack,*alphaslack;
+  CFLOAT *aicache;  /* buffer to keep one row of hessian */
+
+  TIMING timing_profile;
+  SHRINK_STATE shrink_state;
+
+  runtime_start=get_runtime();
+  timing_profile.time_kernel=0;
+  timing_profile.time_opti=0;
+  timing_profile.time_shrink=0;
+  timing_profile.time_update=0;
+  timing_profile.time_model=0;
+  timing_profile.time_check=0;
+  timing_profile.time_select=0;
+  kernel_cache_statistic=0;
+
+  learn_parm->totwords=totwords;
+
+  /* make sure -n value is reasonable */
+  if((learn_parm->svm_newvarsinqp < 2)
+     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
+    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
+  }
+
+  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);
+
+  label = (long *)my_malloc(sizeof(long)*totdoc);
+  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
+  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
+  c = (double *)my_malloc(sizeof(double)*totdoc);
+  a = (double *)my_malloc(sizeof(double)*totdoc);
+  lin = (double *)my_malloc(sizeof(double)*totdoc);
+  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
+  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
+  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
+  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));
+
+  model->at_upper_bound=0;
+  model->b=0;
+  model->supvec[0]=0;  /* element 0 reserved and empty for now */
+  model->alpha[0]=0;
+  model->lin_weights=NULL;
+  model->totwords=totwords;
+  model->totdoc=totdoc;
+  model->kernel_parm=(*kernel_parm);
+  model->sv_num=1;
+  model->loo_error=-1;
+  model->loo_recall=-1;
+  model->loo_precision=-1;
+  model->xa_error=-1;
+  model->xa_recall=-1;
+  model->xa_precision=-1;
+
+  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
+  r_delta_sq=r_delta*r_delta;
+
+  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
+  if(learn_parm->svm_c == 0.0) {  /* default value for C */
+    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
+    if(verbosity>=1)
+      C_PRINTF("Setting default regularization parameter C=%.4f\n",
+	       learn_parm->svm_c);
+  }
+
+  learn_parm->biased_hyperplane=0; /* learn an unbiased hyperplane */
+
+  learn_parm->eps=0.0;             /* No margin, unless explicitly handcoded
+                                      in the right-hand side in the training
+                                      set. */
+
+  for(i=0;i<totdoc;i++) {    /* various inits */
+    docs[i]->docnum=i;
+    a[i]=0;
+    lin[i]=0;
+    c[i]=rhs[i];             /* set right-hand side */
+    unlabeled[i]=0;
+    inconsistent[i]=0;
+    learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
+      docs[i]->costfactor;
+    label[i]=1;
+  }
+  if(learn_parm->sharedslack)  /* if shared slacks are used, they must */
+    for(i=0;i<totdoc;i++)      /* be used on every example */
+      if(!docs[i]->slackid) {
+	perror("Error: Missing shared slacks definitions in some of the examples.");
+	C_EXIT(0);
+      }
+
+  /* compute starting state for initial alpha values */
+  if(alpha) {
+    if(verbosity>=1) {
+      C_PRINTF("Computing starting state..."); C_FFLUSH(stdout);
+    }
+    index = (long *)my_malloc(sizeof(long)*totdoc);
+    index2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
+    weights=(double *)my_malloc(sizeof(double)*(totwords+1));
+    aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc);
+    for(i=0;i<totdoc;i++) {  /* create full index and clip alphas */
+      index[i]=1;
+      alpha[i]=fabs(alpha[i]);
+      if(alpha[i]<0) alpha[i]=0;
+      if(alpha[i]>learn_parm->svm_cost[i]) alpha[i]=learn_parm->svm_cost[i];
+    }
+    if(kernel_parm->kernel_type != LINEAR) {
+      for(i=0;i<totdoc;i++)  /* fill kernel cache with unbounded SV */
+	if((alpha[i]>0) && (alpha[i]<learn_parm->svm_cost[i])
+	   && (kernel_cache_space_available(kernel_cache)))
+	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
+      for(i=0;i<totdoc;i++)  /* fill rest of kernel cache with bounded SV */
+	if((alpha[i]==learn_parm->svm_cost[i])
+	   && (kernel_cache_space_available(kernel_cache)))
+	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
+    }
+    (void)compute_index(index,totdoc,index2dnum);
+    update_linear_component(docs,label,index2dnum,alpha,a,index2dnum,totdoc,
+			    totwords,kernel_parm,kernel_cache,lin,aicache,
+			    weights);
+    (void)calculate_svm_model(docs,label,unlabeled,lin,alpha,a,c,
+			      learn_parm,index2dnum,index2dnum,model);
+    for(i=0;i<totdoc;i++) {    /* copy initial alphas */
+      a[i]=alpha[i];
+    }
+    free(index);
+    free(index2dnum);
+    free(weights);
+    free(aicache);
+    if(verbosity>=1) {
+      C_PRINTF("done.\n"); C_FFLUSH(stdout);
+    }
+  }
+
+  /* removing inconsistent does not work for general optimization problem */
+  if(learn_parm->remove_inconsistent) {
+    learn_parm->remove_inconsistent = 0;
+    C_PRINTF("'remove inconsistent' not available in this mode. Switching option off!"); C_FFLUSH(stdout);
+  }
+
+  /* caching makes no sense for linear kernel */
+  if(kernel_parm->kernel_type == LINEAR) {
+    kernel_cache = NULL;
+  }
+
+  if(verbosity==1) {
+    C_PRINTF("Optimizing"); C_FFLUSH(stdout);
+  }
+
+  /* train the svm */
+  if(learn_parm->sharedslack)
+    iterations=optimize_to_convergence_sharedslack(docs,label,totdoc,
+						   totwords,learn_parm,kernel_parm,
+						   kernel_cache,&shrink_state,model,
+						   a,lin,c,&timing_profile,
+						   &maxdiff);
+  else
+    iterations=optimize_to_convergence(docs,label,totdoc,
+				       totwords,learn_parm,kernel_parm,
+				       kernel_cache,&shrink_state,model,
+				       inconsistent,unlabeled,
+				       a,lin,c,&timing_profile,
+				       &maxdiff,(long)-1,(long)1);
+
+  learn_parm->iterations=iterations;
+
+  if(verbosity>=1) {
+    if(verbosity==1) C_PRINTF("done. (%ld iterations)\n",iterations);
+
+    misclassified=0;
+    for(i=0;(i<totdoc);i++) { /* get final statistic */
+      if((lin[i]-model->b)*(double)label[i] <= 0.0)
+	misclassified++;
+    }
+
+    C_PRINTF("Optimization finished (maxdiff=%.5f).\n",maxdiff);
+
+    runtime_end=get_runtime();
+    if(verbosity>=2) {
+      C_PRINTF("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
+	       ((float)runtime_end-(float)runtime_start)/100.0,
+	       (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
+	       (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
+    }
+    else {
+      C_PRINTF("Runtime in cpu-seconds: %.2f\n",
+	       (runtime_end-runtime_start)/100.0);
+    }
+  }
+  if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
+    loss=0;
+    model_length=0;
+    for(i=0;i<totdoc;i++) {
+      if((lin[i]-model->b)*(double)label[i] < c[i]-learn_parm->epsilon_crit)
+	loss+=c[i]-(lin[i]-model->b)*(double)label[i];
+      model_length+=a[i]*label[i]*lin[i];
+    }
+    model_length=sqrt(model_length);
+    C_FPRINTF(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
+  }
+
+  if(learn_parm->sharedslack) {
+    index = (long *)my_malloc(sizeof(long)*totdoc);
+    index2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
+    maxslackid=0;
+    for(i=0;i<totdoc;i++) {    /* create full index */
+      index[i]=1;
+      if(maxslackid<docs[i]->slackid)
+	maxslackid=docs[i]->slackid;
+    }
+    (void)compute_index(index,totdoc,index2dnum);
+    slack=(double *)my_malloc(sizeof(double)*(maxslackid+1));
+    alphaslack=(double *)my_malloc(sizeof(double)*(maxslackid+1));
+    for(i=0;i<=maxslackid;i++) {  /* init shared slacks */
+      slack[i]=0;
+      alphaslack[i]=0;
+    }
+    compute_shared_slacks(docs,label,a,lin,c,index2dnum,learn_parm,
+			  slack,alphaslack);
+    loss=0;
+    model->at_upper_bound=0;
+    svsetnum=0;
+    for(i=0;i<=maxslackid;i++) {  /* create full index */
+      loss+=slack[i];
+      if(alphaslack[i] > (learn_parm->svm_c - learn_parm->epsilon_a))
+	model->at_upper_bound++;
+      if(alphaslack[i] > learn_parm->epsilon_a)
+	svsetnum++;
+    }
+    free(index);
+    free(index2dnum);
+    free(slack);
+    free(alphaslack);
+  }
+
+  if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
+    if(learn_parm->sharedslack) {
+      C_PRINTF("Number of SV: %ld\n",
+	       model->sv_num-1);
+      C_PRINTF("Number of non-zero slack variables: %ld (out of %ld)\n",
+	       model->at_upper_bound,svsetnum);
+      C_FPRINTF(stdout,"L1 loss: loss=%.5f\n",loss);
+    }
+    else {
+      upsupvecnum=0;
+      for(i=1;i<model->sv_num;i++) {
+	if(fabs(model->alpha[i]) >=
+	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
+	    learn_parm->epsilon_a))
+	  upsupvecnum++;
+      }
+      C_PRINTF("Number of SV: %ld (including %ld at upper bound)\n",
+	       model->sv_num-1,upsupvecnum);
+      C_FPRINTF(stdout,"L1 loss: loss=%.5f\n",loss);
+    }
+    example_length=estimate_sphere(model,kernel_parm);
+    C_FPRINTF(stdout,"Norm of longest example vector: |x|=%.5f\n",
+	      length_of_longest_document_vector(docs,totdoc,kernel_parm));
+  }
+  if(verbosity>=1) {
+    C_PRINTF("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
+  }
+
+  if(alpha) {
+    for(i=0;i<totdoc;i++) {    /* copy final alphas */
+      alpha[i]=a[i];
+    }
+  }
+
+  if(learn_parm->alphafile[0])
+    write_alphas(learn_parm->alphafile,a,label,totdoc);
+
+  shrink_state_cleanup(&shrink_state);
+  free(label);
+  free(unlabeled);
+  free(inconsistent);
+  free(c);
+  free(a);
+  free(lin);
+  free(learn_parm->svm_cost);
+}
+
+
+long
+optimize_to_convergence(DOC **docs, long int *label, long int totdoc,
+			long int totwords, LEARN_PARM *learn_parm,
+			KERNEL_PARM *kernel_parm,
+			KERNEL_CACHE *kernel_cache,
+			SHRINK_STATE *shrink_state, MODEL *model,
+			long int *inconsistent, long int *unlabeled,
+			double *a, double *lin, double *c,
+			TIMING *timing_profile, double *maxdiff,
+			long int heldout, long int retrain)
+     /* docs:         Training vectors (x-part) */
+     /* label:        Training labels/value (y-part, zero if test example for
+		       transduction) */
+     /* totdoc:       Number of examples in docs/label */
+     /* totwords:     Number of features (i.e. highest feature index) */
+     /* laern_parm:   Learning paramenters */
+     /* kernel_parm:  Kernel paramenters */
+     /* kernel_cache: Initialized/partly filled Cache, if using a kernel.
+                      NULL if linear. */
+     /* shrink_state: State of active variables */
+     /* model:        Returns learning result */
+     /* inconsistent: examples thrown out as inconstistent */
+     /* unlabeled:    test examples for transduction */
+     /* a:            alphas */
+     /* lin:          linear component of gradient */
+     /* c:            right hand side of inequalities (margin) */
+     /* maxdiff:      returns maximum violation of KT-conditions */
+     /* heldout:      marks held-out example for leave-one-out (or -1) */
+     /* retrain:      selects training mode (1=regular / 2=holdout) */
+{
+  long *chosen,*key,i,j,jj,*last_suboptimal_at,noshrink;
+  long inconsistentnum,choosenum,already_chosen=0,iteration;
+  long misclassified,supvecnum=0,*active2dnum,inactivenum;
+  long *working2dnum,*selexam;
+  long activenum;
+  double criterion,eq;
+  double *a_old;
+  long t0=0,t1=0,t2=0,t3=0,t4=0,t5=0,t6=0; /* timing */
+  long transductcycle;
+  long transduction;
+  double epsilon_crit_org;
+  double bestmaxdiff;
+  long bestmaxdiffiter,terminate;
+
+  double *selcrit;  /* buffer for sorting */
+  CFLOAT *aicache;  /* buffer to keep one row of hessian */
+  double *weights;  /* buffer for weight vector in linear case */
+  QP qp;            /* buffer for one quadratic program */
+
+  epsilon_crit_org=learn_parm->epsilon_crit; /* save org */
+  if(kernel_parm->kernel_type == LINEAR) {
+    learn_parm->epsilon_crit=2.0;
+    kernel_cache=NULL;  /* caching makes no sense for linear kernel */
+  }
+  learn_parm->epsilon_shrink=2;
+  (*maxdiff)=1;
+
+  learn_parm->totwords=totwords;
+
+  chosen = (long *)my_malloc(sizeof(long)*totdoc);
+  last_suboptimal_at = (long *)my_malloc(sizeof(long)*totdoc);
+  key = (long *)my_malloc(sizeof(long)*(totdoc+11));
+  selcrit = (double *)my_malloc(sizeof(double)*totdoc);
+  selexam = (long *)my_malloc(sizeof(long)*totdoc);
+  a_old = (double *)my_malloc(sizeof(double)*totdoc);
+  aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc);
+  working2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
+  active2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
+  qp.opt_ce = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
+  qp.opt_ce0 = (double *)my_malloc(sizeof(double));
+  qp.opt_g = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize
+				 *learn_parm->svm_maxqpsize);
+  qp.opt_g0 = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
+  qp.opt_xinit = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
+  qp.opt_low=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
+  qp.opt_up=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
+  weights=(double *)my_malloc(sizeof(double)*(totwords+1));
+
+  choosenum=0;
+  inconsistentnum=0;
+  transductcycle=0;
+  transduction=0;
+  if(!retrain) retrain=1;
+  iteration=1;
+  bestmaxdiffiter=1;
+  bestmaxdiff=999999999;
+  terminate=0;
+
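+  /* main loop (below): pick a working set of at most svm_maxqpsize
+     variables, solve the small QP subproblem with the HIDEO solver,
+     update the gradients in lin[], and periodically shrink inactive
+     variables; a watchdog terminates the loop when no progress has
+     been made for learn_parm->maxiter iterations. */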
+  if(kernel_cache) {
+    kernel_cache->time=iteration;  /* for lru cache */
+    kernel_cache_reset_lru(kernel_cache);
+  }
+
+  for(i=0;i<totdoc;i++) {    /* various inits */
+    chosen[i]=0;
+    a_old[i]=a[i];
+    last_suboptimal_at[i]=1;
+    if(inconsistent[i])
+      inconsistentnum++;
+  }
+  activenum=compute_index(shrink_state->active,totdoc,active2dnum);
+  inactivenum=totdoc-activenum;
+  clear_index(working2dnum);
+
+  /* repeat this loop until we have convergence */
+  for(;retrain && (!terminate);iteration++) {
+
+    if(kernel_cache)
+      kernel_cache->time=iteration;  /* for lru cache */
+    if(verbosity>=2) {
+      C_PRINTF(
+	"Iteration %ld: ",iteration); C_FFLUSH(stdout);
+    }
+    else if(verbosity==1) {
+      C_PRINTF("."); C_FFLUSH(stdout);
+    }
+
+    if(verbosity>=2) t0=get_runtime();
+    if(verbosity>=3) {
+      C_PRINTF("\nSelecting working set... "); C_FFLUSH(stdout);
+    }
+
+    if(learn_parm->svm_newvarsinqp>learn_parm->svm_maxqpsize)
+      learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
+
+    i=0;
+    for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* clear working set */
+      if((chosen[j]>=(learn_parm->svm_maxqpsize/
+		      minl(learn_parm->svm_maxqpsize,
+			   learn_parm->svm_newvarsinqp)))
+	 || (inconsistent[j])
+	 || (j == heldout)) {
+	chosen[j]=0;
+	choosenum--;
+      }
+      else {
+	chosen[j]++;
+	working2dnum[i++]=j;
+      }
+    }
+    working2dnum[i]=-1;
+
+    if(retrain == 2) {
+      choosenum=0;
+      for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* fully clear working set */
+	chosen[j]=0;
+      }
+      clear_index(working2dnum);
+      for(i=0;i<totdoc;i++) { /* set inconsistent examples to zero (-i 1) */
+	if((inconsistent[i] || (heldout==i)) && (a[i] != 0.0)) {
+	  chosen[i]=99999;
+	  choosenum++;
+	  a[i]=0;
+	}
+      }
+      if(learn_parm->biased_hyperplane) {
+	eq=0;
+	for(i=0;i<totdoc;i++) { /* make sure we fulfill equality constraint */
+	  eq+=a[i]*label[i];
+	}
+	for(i=0;(i<totdoc) && (fabs(eq) > learn_parm->epsilon_a);i++) {
+	  if((eq*label[i] > 0) && (a[i] > 0)) {
+	    chosen[i]=88888;
+	    choosenum++;
+	    if((eq*label[i]) > a[i]) {
+	      eq-=(a[i]*label[i]);
+	      a[i]=0;
+	    }
+	    else {
+	      a[i]-=(eq*label[i]);
+	      eq=0;
+	    }
+	  }
+	}
+      }
+      compute_index(chosen,totdoc,working2dnum);
+    }
+    else {  /* select working set according to steepest gradient */
+      if(iteration % 101) {
+	already_chosen=0;
+	if((minl(learn_parm->svm_newvarsinqp,
+		 learn_parm->svm_maxqpsize-choosenum)>=4)
+	   && (kernel_parm->kernel_type != LINEAR)) {
+	  /* select part of the working set from cache */
+	  already_chosen=select_next_qp_subproblem_grad(
+	      label,unlabeled,a,lin,c,totdoc,
+	      (long)(minl(learn_parm->svm_maxqpsize-choosenum,
+			  learn_parm->svm_newvarsinqp)
+		     /2),
+	      learn_parm,inconsistent,active2dnum,
+	      working2dnum,selcrit,selexam,kernel_cache,1,
+	      key,chosen);
+	  choosenum+=already_chosen;
+	}
+	choosenum+=select_next_qp_subproblem_grad(
+	    label,unlabeled,a,lin,c,totdoc,
+	    minl(learn_parm->svm_maxqpsize-choosenum,
+		 learn_parm->svm_newvarsinqp-already_chosen),
+	    learn_parm,inconsistent,active2dnum,
+	    working2dnum,selcrit,selexam,kernel_cache,0,key,
+	    chosen);
+      }
+      else { /* once in a while, select a somewhat random working set
+		to get unlocked of infinite loops due to numerical
+		inaccuracies in the core qp-solver */
+	choosenum+=select_next_qp_subproblem_rand(
+	    label,unlabeled,a,lin,c,totdoc,
+	    minl(learn_parm->svm_maxqpsize-choosenum,
+		 learn_parm->svm_newvarsinqp),
+	    learn_parm,inconsistent,active2dnum,
+	    working2dnum,selcrit,selexam,kernel_cache,key,
+	    chosen,iteration);
+      }
+    }
+
+    if(verbosity>=2) {
+      C_PRINTF(" %ld vectors chosen\n",choosenum); C_FFLUSH(stdout);
+    }
+
+    if(verbosity>=2) t1=get_runtime();
+
+    if(kernel_cache)
+      cache_multiple_kernel_rows(kernel_cache,docs,working2dnum,
+				 choosenum,kernel_parm);
+
+    if(verbosity>=2) t2=get_runtime();
+    if(retrain != 2) {
+      optimize_svm(docs,label,unlabeled,inconsistent,0.0,chosen,active2dnum,
+		   model,totdoc,working2dnum,choosenum,a,lin,c,learn_parm,
+		   aicache,kernel_parm,&qp,&epsilon_crit_org);
+    }
+
+    if(verbosity>=2) t3=get_runtime();
+    update_linear_component(docs,label,active2dnum,a,a_old,working2dnum,totdoc,
totwords,kernel_parm,kernel_cache,lin,aicache, + weights); + + if(verbosity>=2) t4=get_runtime(); + supvecnum=calculate_svm_model(docs,label,unlabeled,lin,a,a_old,c, + learn_parm,working2dnum,active2dnum,model); + + if(verbosity>=2) t5=get_runtime(); + + /* The following computation of the objective function works only */ + /* relative to the active variables */ + if(verbosity>=3) { + criterion=compute_objective_function(a,lin,c,learn_parm->eps,label, + active2dnum); + C_PRINTF("Objective function (over active variables): %.16f\n",criterion); + C_FFLUSH(stdout); + } + + for(jj=0;(i=working2dnum[jj])>=0;jj++) { + a_old[i]=a[i]; + } + + if(retrain == 2) { /* reset inconsistent unlabeled examples */ + for(i=0;(i=2) { + t6=get_runtime(); + timing_profile->time_select+=t1-t0; + timing_profile->time_kernel+=t2-t1; + timing_profile->time_opti+=t3-t2; + timing_profile->time_update+=t4-t3; + timing_profile->time_model+=t5-t4; + timing_profile->time_check+=t6-t5; + } + + /* checking whether optimizer got stuck */ + if((*maxdiff) < bestmaxdiff) { + bestmaxdiff=(*maxdiff); + bestmaxdiffiter=iteration; + } + if(iteration > (bestmaxdiffiter+learn_parm->maxiter)) { + /* long time no progress? */ + terminate=1; + retrain=0; + if(verbosity>=1) + C_PRINTF("\nWARNING: Relaxing KT-Conditions due to slow progress! Terminating!\n"); + } + + noshrink=0; + if((!retrain) && (inactivenum>0) + && ((!learn_parm->skip_final_opt_check) + || (kernel_parm->kernel_type == LINEAR))) { + if(((verbosity>=1) && (kernel_parm->kernel_type != LINEAR)) + || (verbosity>=2)) { + if(verbosity==1) { + C_PRINTF("\n"); + } + C_PRINTF(" Checking optimality of inactive variables..."); + C_FFLUSH(stdout); + } + t1=get_runtime(); + reactivate_inactive_examples(label,unlabeled,a,shrink_state,lin,c,totdoc, + totwords,iteration,learn_parm,inconsistent, + docs,kernel_parm,kernel_cache,model,aicache, + weights,maxdiff); + /* Update to new active variables. */ + activenum=compute_index(shrink_state->active,totdoc,active2dnum); + inactivenum=totdoc-activenum; + /* reset watchdog */ + bestmaxdiff=(*maxdiff); + bestmaxdiffiter=iteration; + /* termination criterion */ + noshrink=1; + retrain=0; + if((*maxdiff) > learn_parm->epsilon_crit) + retrain=1; + timing_profile->time_shrink+=get_runtime()-t1; + if(((verbosity>=1) && (kernel_parm->kernel_type != LINEAR)) + || (verbosity>=2)) { + C_PRINTF("done.\n"); C_FFLUSH(stdout); + C_PRINTF(" Number of inactive variables = %ld\n",inactivenum); + } + } + + if((!retrain) && (learn_parm->epsilon_crit>(*maxdiff))) + learn_parm->epsilon_crit=(*maxdiff); + if((!retrain) && (learn_parm->epsilon_crit>epsilon_crit_org)) { + learn_parm->epsilon_crit/=2.0; + retrain=1; + noshrink=1; + } + if(learn_parm->epsilon_critepsilon_crit=epsilon_crit_org; + + if(verbosity>=2) { + C_PRINTF(" => (%ld SV (incl. 
%ld SV at u-bound), max violation=%.5f)\n", + supvecnum,model->at_upper_bound,(*maxdiff)); + C_FFLUSH(stdout); + } + if(verbosity>=3) { + C_PRINTF("\n"); + } + + if((!retrain) && (transduction)) { + for(i=0;(iactive[i]=1; + } + activenum=compute_index(shrink_state->active,totdoc,active2dnum); + inactivenum=0; + if(verbosity==1) C_PRINTF("done\n"); + retrain=incorporate_unlabeled_examples(model,label,inconsistent, + unlabeled,a,lin,totdoc, + selcrit,selexam,key, + transductcycle,kernel_parm, + learn_parm); + epsilon_crit_org=learn_parm->epsilon_crit; + if(kernel_parm->kernel_type == LINEAR) + learn_parm->epsilon_crit=1; + transductcycle++; + /* reset watchdog */ + bestmaxdiff=(*maxdiff); + bestmaxdiffiter=iteration; + } + else if(((iteration % 10) == 0) && (!noshrink)) { + activenum=shrink_problem(docs,learn_parm,shrink_state,kernel_parm, + active2dnum,last_suboptimal_at,iteration,totdoc, + maxl((long)(activenum/10), + maxl((long)(totdoc/500),100)), + a,inconsistent); + inactivenum=totdoc-activenum; + if((kernel_cache) + && (supvecnum>kernel_cache->max_elems) + && ((kernel_cache->activenum-activenum)>maxl((long)(activenum/10),500))) { + kernel_cache_shrink(kernel_cache,totdoc, + minl((kernel_cache->activenum-activenum), + (kernel_cache->activenum-supvecnum)), + shrink_state->active); + } + } + + if((!retrain) && learn_parm->remove_inconsistent) { + if(verbosity>=1) { + C_PRINTF(" Moving training errors to inconsistent examples..."); + C_FFLUSH(stdout); + } + if(learn_parm->remove_inconsistent == 1) { + retrain=identify_inconsistent(a,label,unlabeled,totdoc,learn_parm, + &inconsistentnum,inconsistent); + } + else if(learn_parm->remove_inconsistent == 2) { + retrain=identify_misclassified(lin,label,unlabeled,totdoc, + model,&inconsistentnum,inconsistent); + } + else if(learn_parm->remove_inconsistent == 3) { + retrain=identify_one_misclassified(lin,label,unlabeled,totdoc, + model,&inconsistentnum,inconsistent); + } + if(retrain) { + if(kernel_parm->kernel_type == LINEAR) { /* reinit shrinking */ + learn_parm->epsilon_crit=2.0; + } + } + if(verbosity>=1) { + C_PRINTF("done.\n"); + if(retrain) { + C_PRINTF(" Now %ld inconsistent examples.\n",inconsistentnum); + } + } + } + } /* end of loop */ + + free(chosen); + free(last_suboptimal_at); + free(key); + free(selcrit); + free(selexam); + free(a_old); + free(aicache); + free(working2dnum); + free(active2dnum); + free(qp.opt_ce); + free(qp.opt_ce0); + free(qp.opt_g); + free(qp.opt_g0); + free(qp.opt_xinit); + free(qp.opt_low); + free(qp.opt_up); + free(weights); + + learn_parm->epsilon_crit=epsilon_crit_org; /* restore org */ + model->maxdiff=(*maxdiff); + + return(iteration); +} + +long optimize_to_convergence_sharedslack(DOC **docs, long int *label, + long int totdoc, + long int totwords, LEARN_PARM *learn_parm, + KERNEL_PARM *kernel_parm, + KERNEL_CACHE *kernel_cache, + SHRINK_STATE *shrink_state, MODEL *model, + double *a, double *lin, double *c, + TIMING *timing_profile, double *maxdiff) + /* docs: Training vectors (x-part) */ + /* label: Training labels/value (y-part, zero if test example for + transduction) */ + /* totdoc: Number of examples in docs/label */ + /* totwords: Number of features (i.e. highest feature index) */ + /* learn_parm: Learning paramenters */ + /* kernel_parm: Kernel paramenters */ + /* kernel_cache: Initialized/partly filled Cache, if using a kernel. + NULL if linear. 
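+                   (A NULL cache is safe here because the linear case
+                   maintains the gradient through an explicit weight
+                   vector in update_linear_component(); the guard at the
+                   start of the function body NULLs the cache itself when
+                   kernel_type == LINEAR.)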
*/ + /* shrink_state: State of active variables */ + /* model: Returns learning result */ + /* a: alphas */ + /* lin: linear component of gradient */ + /* c: right hand side of inequalities (margin) */ + /* maxdiff: returns maximum violation of KT-conditions */ +{ + long *chosen,*key,i,j,jj,*last_suboptimal_at,noshrink,*unlabeled; + long *inconsistent,choosenum,already_chosen=0,iteration; + long misclassified,supvecnum=0,*active2dnum,inactivenum; + long *working2dnum,*selexam,*ignore; + long activenum,retrain,maxslackid,slackset,jointstep; + double criterion,eq_target; + double *a_old,*alphaslack; + long t0=0,t1=0,t2=0,t3=0,t4=0,t5=0,t6=0; /* timing */ + double epsilon_crit_org,maxsharedviol; + double bestmaxdiff; + long bestmaxdiffiter,terminate; + + double *selcrit; /* buffer for sorting */ + CFLOAT *aicache; /* buffer to keep one row of hessian */ + double *weights; /* buffer for weight vector in linear case */ + QP qp; /* buffer for one quadratic program */ + double *slack; /* vector of slack variables for optimization with + shared slacks */ + + epsilon_crit_org=learn_parm->epsilon_crit; /* save org */ + if(kernel_parm->kernel_type == LINEAR) { + learn_parm->epsilon_crit=2.0; + kernel_cache=NULL; /* caching makes no sense for linear kernel */ + } + learn_parm->epsilon_shrink=2; + (*maxdiff)=1; + + learn_parm->totwords=totwords; + + chosen = (long *)my_malloc(sizeof(long)*totdoc); + unlabeled = (long *)my_malloc(sizeof(long)*totdoc); + inconsistent = (long *)my_malloc(sizeof(long)*totdoc); + ignore = (long *)my_malloc(sizeof(long)*totdoc); + key = (long *)my_malloc(sizeof(long)*(totdoc+11)); + selcrit = (double *)my_malloc(sizeof(double)*totdoc); + selexam = (long *)my_malloc(sizeof(long)*totdoc); + a_old = (double *)my_malloc(sizeof(double)*totdoc); + aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc); + working2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11)); + active2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11)); + qp.opt_ce = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize); + qp.opt_ce0 = (double *)my_malloc(sizeof(double)); + qp.opt_g = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize + *learn_parm->svm_maxqpsize); + qp.opt_g0 = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize); + qp.opt_xinit = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize); + qp.opt_low=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize); + qp.opt_up=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize); + weights=(double *)my_malloc(sizeof(double)*(totwords+1)); + maxslackid=0; + for(i=0;islackid) + maxslackid=docs[i]->slackid; + } + slack=(double *)my_malloc(sizeof(double)*(maxslackid+1)); + alphaslack=(double *)my_malloc(sizeof(double)*(maxslackid+1)); + last_suboptimal_at = (long *)my_malloc(sizeof(long)*(maxslackid+1)); + for(i=0;i<=maxslackid;i++) { /* init shared slacks */ + slack[i]=0; + alphaslack[i]=0; + last_suboptimal_at[i]=1; + } + + choosenum=0; + retrain=1; + iteration=1; + bestmaxdiffiter=1; + bestmaxdiff=999999999; + terminate=0; + + if(kernel_cache) { + kernel_cache->time=iteration; /* for lru cache */ + kernel_cache_reset_lru(kernel_cache); + } + + for(i=0;iactive,totdoc,active2dnum); + inactivenum=totdoc-activenum; + clear_index(working2dnum); + + /* call to init slack and alphaslack */ + compute_shared_slacks(docs,label,a,lin,c,active2dnum,learn_parm, + slack,alphaslack); + + /* repeat this loop until we have convergence */ + for(;retrain && (!terminate);iteration++) { + + if(kernel_cache) + 
kernel_cache->time=iteration; /* for lru cache */ + if(verbosity>=2) { + C_PRINTF( + "Iteration %ld: ",iteration); C_FFLUSH(stdout); + } + else if(verbosity==1) { + C_PRINTF("."); C_FFLUSH(stdout); + } + + if(verbosity>=2) t0=get_runtime(); + if(verbosity>=3) { + C_PRINTF("\nSelecting working set... "); C_FFLUSH(stdout); + } + + if(learn_parm->svm_newvarsinqp>learn_parm->svm_maxqpsize) + learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize; + + /* select working set according to steepest gradient */ + jointstep=0; + eq_target=0; + if(iteration % 101) { + slackset=select_next_qp_slackset(docs,label,a,lin,slack,alphaslack,c, + learn_parm,active2dnum,&maxsharedviol); + if((iteration % 2) + || (!slackset) || (maxsharedviolepsilon_crit)){ + /* do a step with examples from different slack sets */ + if(verbosity >= 2) { + C_PRINTF("(i-step)"); C_FFLUSH(stdout); + } + i=0; + for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* clear old part of working set */ + if((chosen[j]>=(learn_parm->svm_maxqpsize/ + minl(learn_parm->svm_maxqpsize, + learn_parm->svm_newvarsinqp)))) { + chosen[j]=0; + choosenum--; + } + else { + chosen[j]++; + working2dnum[i++]=j; + } + } + working2dnum[i]=-1; + + already_chosen=0; + if((minl(learn_parm->svm_newvarsinqp, + learn_parm->svm_maxqpsize-choosenum)>=4) + && (kernel_parm->kernel_type != LINEAR)) { + /* select part of the working set from cache */ + already_chosen=select_next_qp_subproblem_grad( + label,unlabeled,a,lin,c,totdoc, + (long)(minl(learn_parm->svm_maxqpsize-choosenum, + learn_parm->svm_newvarsinqp) + /2), + learn_parm,inconsistent,active2dnum, + working2dnum,selcrit,selexam,kernel_cache, + (long)1,key,chosen); + choosenum+=already_chosen; + } + choosenum+=select_next_qp_subproblem_grad( + label,unlabeled,a,lin,c,totdoc, + minl(learn_parm->svm_maxqpsize-choosenum, + learn_parm->svm_newvarsinqp-already_chosen), + learn_parm,inconsistent,active2dnum, + working2dnum,selcrit,selexam,kernel_cache, + (long)0,key,chosen); + } + else { /* do a step with all examples from same slack set */ + if(verbosity >= 2) { + C_PRINTF("(j-step on %ld)",slackset); C_FFLUSH(stdout); + } + jointstep=1; + for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* clear working set */ + chosen[j]=0; + } + working2dnum[0]=-1; + eq_target=alphaslack[slackset]; + for(j=0;j=0;jj++) { */ + if(docs[j]->slackid != slackset) + ignore[j]=1; + else { + ignore[j]=0; + learn_parm->svm_cost[j]=learn_parm->svm_c; + /* C_PRINTF("Inslackset(%ld,%ld)",j,shrink_state->active[j]); */ + } + } + learn_parm->biased_hyperplane=1; + choosenum=select_next_qp_subproblem_grad( + label,unlabeled,a,lin,c,totdoc, + learn_parm->svm_maxqpsize, + learn_parm,ignore,active2dnum, + working2dnum,selcrit,selexam,kernel_cache, + (long)0,key,chosen); + learn_parm->biased_hyperplane=0; + } + } + else { /* once in a while, select a somewhat random working set + to get unlocked of infinite loops due to numerical + inaccuracies in the core qp-solver */ + choosenum+=select_next_qp_subproblem_rand( + label,unlabeled,a,lin,c,totdoc, + minl(learn_parm->svm_maxqpsize-choosenum, + learn_parm->svm_newvarsinqp), + learn_parm,inconsistent,active2dnum, + working2dnum,selcrit,selexam,kernel_cache,key, + chosen,iteration); + } + + if(verbosity>=2) { + C_PRINTF(" %ld vectors chosen\n",choosenum); C_FFLUSH(stdout); + } + + if(verbosity>=2) t1=get_runtime(); + + if(kernel_cache) + cache_multiple_kernel_rows(kernel_cache,docs,working2dnum, + choosenum,kernel_parm); + + if(verbosity>=2) t2=get_runtime(); + if(jointstep) learn_parm->biased_hyperplane=1; + 
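+      /* [Editor's note: what the clipping loop after optimize_svm()
+         enforces, with a worked example. Each slack set s must satisfy
+             alphaslack[s] = sum of a[i] with docs[i]->slackid == s <= svm_c.
+         E.g. with svm_c=1.0 and one set holding a=(0.7,0.6), the sum 1.3
+         exceeds svm_c by 0.3; the first member visited is reduced by the
+         excess, giving a=(0.4,0.6) and alphaslack=1.0.] */
+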
optimize_svm(docs,label,unlabeled,ignore,eq_target,chosen,active2dnum, + model,totdoc,working2dnum,choosenum,a,lin,c,learn_parm, + aicache,kernel_parm,&qp,&epsilon_crit_org); + learn_parm->biased_hyperplane=0; + + for(jj=0;(i=working2dnum[jj])>=0;jj++) /* recompute sums of alphas */ + alphaslack[docs[i]->slackid]+=(a[i]-a_old[i]); + for(jj=0;(i=working2dnum[jj])>=0;jj++) { /* reduce alpha to fulfill + constraints */ + if(alphaslack[docs[i]->slackid] > learn_parm->svm_c) { + if(a[i] < (alphaslack[docs[i]->slackid]-learn_parm->svm_c)) { + alphaslack[docs[i]->slackid]-=a[i]; + a[i]=0; + } + else { + a[i]-=(alphaslack[docs[i]->slackid]-learn_parm->svm_c); + alphaslack[docs[i]->slackid]=learn_parm->svm_c; + } + } + } + for(jj=0;(i=active2dnum[jj])>=0;jj++) + learn_parm->svm_cost[i]=a[i]+(learn_parm->svm_c + -alphaslack[docs[i]->slackid]); + + if(verbosity>=2) t3=get_runtime(); + update_linear_component(docs,label,active2dnum,a,a_old,working2dnum,totdoc, + totwords,kernel_parm,kernel_cache,lin,aicache, + weights); + compute_shared_slacks(docs,label,a,lin,c,active2dnum,learn_parm, + slack,alphaslack); + + if(verbosity>=2) t4=get_runtime(); + supvecnum=calculate_svm_model(docs,label,unlabeled,lin,a,a_old,c, + learn_parm,working2dnum,active2dnum,model); + + if(verbosity>=2) t5=get_runtime(); + + /* The following computation of the objective function works only */ + /* relative to the active variables */ + if(verbosity>=3) { + criterion=compute_objective_function(a,lin,c,learn_parm->eps,label, + active2dnum); + C_PRINTF("Objective function (over active variables): %.16f\n",criterion); + C_FFLUSH(stdout); + } + + for(jj=0;(i=working2dnum[jj])>=0;jj++) { + a_old[i]=a[i]; + } + + retrain=check_optimality_sharedslack(docs,model,label,a,lin,c, + slack,alphaslack,totdoc,learn_parm, + maxdiff,epsilon_crit_org,&misclassified, + active2dnum,last_suboptimal_at, + iteration,kernel_parm); + + if(verbosity>=2) { + t6=get_runtime(); + timing_profile->time_select+=t1-t0; + timing_profile->time_kernel+=t2-t1; + timing_profile->time_opti+=t3-t2; + timing_profile->time_update+=t4-t3; + timing_profile->time_model+=t5-t4; + timing_profile->time_check+=t6-t5; + } + + /* checking whether optimizer got stuck */ + if((*maxdiff) < bestmaxdiff) { + bestmaxdiff=(*maxdiff); + bestmaxdiffiter=iteration; + } + if(iteration > (bestmaxdiffiter+learn_parm->maxiter)) { + /* long time no progress? */ + terminate=1; + retrain=0; + if(verbosity>=1) + C_PRINTF("\nWARNING: Relaxing KT-Conditions due to slow progress! Terminating!\n"); + } + + noshrink=0; + + if((!retrain) && (inactivenum>0) + && ((!learn_parm->skip_final_opt_check) + || (kernel_parm->kernel_type == LINEAR))) { + if(((verbosity>=1) && (kernel_parm->kernel_type != LINEAR)) + || (verbosity>=2)) { + if(verbosity==1) { + C_PRINTF("\n"); + } + C_PRINTF(" Checking optimality of inactive variables..."); + C_FFLUSH(stdout); + } + t1=get_runtime(); + reactivate_inactive_examples(label,unlabeled,a,shrink_state,lin,c,totdoc, + totwords,iteration,learn_parm,inconsistent, + docs,kernel_parm,kernel_cache,model,aicache, + weights,maxdiff); + /* Update to new active variables. 
*/ + activenum=compute_index(shrink_state->active,totdoc,active2dnum); + inactivenum=totdoc-activenum; + /* check optimality, since check in reactivate does not work for + sharedslacks */ + retrain=check_optimality_sharedslack(docs,model,label,a,lin,c, + slack,alphaslack,totdoc,learn_parm, + maxdiff,epsilon_crit_org,&misclassified, + active2dnum,last_suboptimal_at, + iteration,kernel_parm); + + /* reset watchdog */ + bestmaxdiff=(*maxdiff); + bestmaxdiffiter=iteration; + /* termination criterion */ + noshrink=1; + retrain=0; + if((*maxdiff) > learn_parm->epsilon_crit) + retrain=1; + timing_profile->time_shrink+=get_runtime()-t1; + if(((verbosity>=1) && (kernel_parm->kernel_type != LINEAR)) + || (verbosity>=2)) { + C_PRINTF("done.\n"); C_FFLUSH(stdout); + C_PRINTF(" Number of inactive variables = %ld\n",inactivenum); + } + } + + if((!retrain) && (learn_parm->epsilon_crit>(*maxdiff))) + learn_parm->epsilon_crit=(*maxdiff); + if((!retrain) && (learn_parm->epsilon_crit>epsilon_crit_org)) { + learn_parm->epsilon_crit/=2.0; + retrain=1; + noshrink=1; + } + if(learn_parm->epsilon_critepsilon_crit=epsilon_crit_org; + + if(verbosity>=2) { + C_PRINTF(" => (%ld SV (incl. %ld SV at u-bound), max violation=%.5f)\n", + supvecnum,model->at_upper_bound,(*maxdiff)); + C_FFLUSH(stdout); + } + if(verbosity>=3) { + C_PRINTF("\n"); + } + + if(((iteration % 10) == 0) && (!noshrink)) { + activenum=shrink_problem(docs,learn_parm,shrink_state, + kernel_parm,active2dnum, + last_suboptimal_at,iteration,totdoc, + maxl((long)(activenum/10), + maxl((long)(totdoc/500),100)), + a,inconsistent); + inactivenum=totdoc-activenum; + if((kernel_cache) + && (supvecnum>kernel_cache->max_elems) + && ((kernel_cache->activenum-activenum)>maxl((long)(activenum/10),500))) { + kernel_cache_shrink(kernel_cache,totdoc, + minl((kernel_cache->activenum-activenum), + (kernel_cache->activenum-supvecnum)), + shrink_state->active); + } + } + + } /* end of loop */ + + + free(alphaslack); + free(slack); + free(chosen); + free(unlabeled); + free(inconsistent); + free(ignore); + free(last_suboptimal_at); + free(key); + free(selcrit); + free(selexam); + free(a_old); + free(aicache); + free(working2dnum); + free(active2dnum); + free(qp.opt_ce); + free(qp.opt_ce0); + free(qp.opt_g); + free(qp.opt_g0); + free(qp.opt_xinit); + free(qp.opt_low); + free(qp.opt_up); + free(weights); + + learn_parm->epsilon_crit=epsilon_crit_org; /* restore org */ + model->maxdiff=(*maxdiff); + + return(iteration); +} + + +double compute_objective_function(double *a, double *lin, double *c, + double eps, long int *label, + long int *active2dnum) + /* Return value of objective function. */ + /* Works only relative to the active variables! 
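+    In symbols, it returns
+        sum over active i of  (eps - y_i*c_i)*a_i + 0.5*a_i*y_i*lin_i,
+    where lin_i = sum_j y_j*a_j*K(x_i,x_j) is the cached linear component
+    of the gradient maintained by update_linear_component().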
*/
+{
+  long i,ii;
+  double criterion;
+  /* calculate value of objective function */
+  criterion=0;
+  for(ii=0;active2dnum[ii]>=0;ii++) {
+    i=active2dnum[ii];
+    criterion=criterion+(eps-(double)label[i]*c[i])*a[i]+0.5*a[i]*label[i]*lin[i];
+  }
+  return(criterion);
+}
+
+void clear_index(long int *index)
+     /* initializes and empties index */
+{
+  index[0]=-1;
+}
+
+void add_to_index(long int *index, long int elem)
+     /* appends elem to the index */
+{
+  register long i;
+  for(i=0;index[i] != -1;i++);
+  index[i]=elem;
+  index[i+1]=-1;
+}
+
+long compute_index(long int *binfeature, long int range, long int *index)
+     /* create an inverted index of binfeature */
+{
+  register long i,ii;
+
+  ii=0;
+  for(i=0;i<range;i++) {
+    if(binfeature[i]) {
+      index[ii]=i;
+      ii++;
+    }
+  }
+  for(i=0;i<4;i++) {
+    index[ii+i]=-1;
+  }
+  return(ii);
+}
+
+void optimize_svm(DOC **docs, long int *label, long int *unlabeled,
+                  long int *exclude_from_eq_const, double eq_target,
+                  long int *chosen, long int *active2dnum, MODEL *model,
+                  long int totdoc, long int *working2dnum, long int varnum,
+                  double *a, double *lin, double *c, LEARN_PARM *learn_parm,
+                  CFLOAT *aicache, KERNEL_PARM *kernel_parm, QP *qp,
+                  double *epsilon_crit_target)
+     /* Do optimization on the working set. */
+{
+    long i;
+    double *a_v;
+
+    compute_matrices_for_optimization(docs,label,unlabeled,
+                                      exclude_from_eq_const,eq_target,chosen,
+                                      active2dnum,working2dnum,model,a,lin,c,
+                                      varnum,totdoc,learn_parm,aicache,
+                                      kernel_parm,qp);
+
+    if(verbosity>=3) {
+      C_PRINTF("Running optimizer..."); C_FFLUSH(stdout);
+    }
+    /* call the qp-subsolver */
+    a_v=optimize_qp(qp,epsilon_crit_target,
+                    learn_parm->svm_maxqpsize,
+                    &(model->b),   /* in case the optimizer gives us */
+                                   /* the threshold for free. otherwise */
+                                   /* b is calculated in calculate_model. */
+                    learn_parm);
+    if(verbosity>=3) {
+      C_PRINTF("done\n");
+    }
+
+    for(i=0;i<varnum;i++) {
+      a[working2dnum[i]]=a_v[i];
+      /*
+      if(a_v[i]<=(0+learn_parm->epsilon_a)) {
+        a[working2dnum[i]]=0;
+      }
+      else if(a_v[i]>=(learn_parm->svm_cost[working2dnum[i]]-learn_parm->epsilon_a)) {
+        a[working2dnum[i]]=learn_parm->svm_cost[working2dnum[i]];
+      }
+      */
+    }
+}
+
+void compute_matrices_for_optimization(DOC **docs, long int *label,
+          long int *unlabeled, long *exclude_from_eq_const, double eq_target,
+          long int *chosen, long int *active2dnum,
+          long int *key, MODEL *model, double *a, double *lin, double *c,
+          long int varnum, long int totdoc, LEARN_PARM *learn_parm,
+          CFLOAT *aicache, KERNEL_PARM *kernel_parm, QP *qp)
+{
+  register long ki,kj,i,j;
+  register double kernel_temp;
+
+  if(verbosity>=3) {
+    C_FPRINTF(stdout,"Computing qp-matrices (type %ld kernel [degree %ld, rbf_gamma %f, coef_lin %f, coef_const %f])...",kernel_parm->kernel_type,kernel_parm->poly_degree,kernel_parm->rbf_gamma,kernel_parm->coef_lin,kernel_parm->coef_const);
+    C_FFLUSH(stdout);
+  }
+
+  qp->opt_n=varnum;
+  qp->opt_ce0[0]=-eq_target; /* compute the constant for equality constraint */
+  for(j=1;j<model->sv_num;j++) { /* start at 1 */
+    if((!chosen[(model->supvec[j])->docnum])
+       && (!exclude_from_eq_const[(model->supvec[j])->docnum])) {
+      qp->opt_ce0[0]+=model->alpha[j];
+    }
+  }
+  if(learn_parm->biased_hyperplane)
+    qp->opt_m=1;
+  else
+    qp->opt_m=0;  /* eq-constraint will be ignored */
+
+  /* init linear part of objective function */
+  for(i=0;i<varnum;i++) {
+    qp->opt_g0[i]=lin[key[i]];
+  }
+
+  for(i=0;i<varnum;i++) {
+    ki=key[i];
+
+    /* Compute the matrix for equality constraints */
+    qp->opt_ce[i]=label[ki];
+    qp->opt_low[i]=0;
+    qp->opt_up[i]=learn_parm->svm_cost[ki];
+
+    kernel_temp=(double)kernel(kernel_parm,docs[ki],docs[ki]);
+    /* compute linear part of objective function */
+    qp->opt_g0[i]-=(kernel_temp*a[ki]*(double)label[ki]);
+    /* compute quadratic part of objective function */
+    qp->opt_g[varnum*i+i]=kernel_temp;
+    for(j=i+1;j<varnum;j++) {
+      kj=key[j];
+      kernel_temp=(double)kernel(kernel_parm,docs[ki],docs[kj]);
+      /* compute linear part of objective function */
+      qp->opt_g0[i]-=(kernel_temp*a[kj]*(double)label[kj]);
+      qp->opt_g0[j]-=(kernel_temp*a[ki]*(double)label[ki]);
+      /* compute quadratic part of objective function */
+      qp->opt_g[varnum*i+j]=(double)label[ki]*(double)label[kj]*kernel_temp;
+      qp->opt_g[varnum*j+i]=(double)label[ki]*(double)label[kj]*kernel_temp;
+    }
+
+    if(verbosity>=3) {
+      if(i % 20 == 0) {
+        C_FPRINTF(stdout,"%ld..",i); C_FFLUSH(stdout);
+      }
+    }
+  }
+
+  for(i=0;i<varnum;i++) {
+    qp->opt_xinit[i]=a[key[i]];
+    /* set linear part of objective function */
+    qp->opt_g0[i]=(learn_parm->eps-(double)label[key[i]]*c[key[i]])+qp->opt_g0[i]*(double)label[key[i]];
+  }
+
+  if(verbosity>=3) {
+    C_FPRINTF(stdout,"done\n");
+  }
+}
+
+long calculate_svm_model(DOC 
**docs, long int *label, long int *unlabeled, + double *lin, double *a, double *a_old, double *c, + LEARN_PARM *learn_parm, long int *working2dnum, + long int *active2dnum, MODEL *model) + /* Compute decision function based on current values */ + /* of alpha. */ +{ + long i,ii,pos,b_calculated=0,first_low,first_high; + double ex_c,b_temp,b_low,b_high; + + if(verbosity>=3) { + C_PRINTF("Calculating model..."); C_FFLUSH(stdout); + } + + if(!learn_parm->biased_hyperplane) { + model->b=0; + b_calculated=1; + } + + for(ii=0;(i=working2dnum[ii])>=0;ii++) { + if((a_old[i]>0) && (a[i]==0)) { /* remove from model */ + pos=model->index[i]; + model->index[i]=-1; + (model->sv_num)--; + model->supvec[pos]=model->supvec[model->sv_num]; + model->alpha[pos]=model->alpha[model->sv_num]; + model->index[(model->supvec[pos])->docnum]=pos; + } + else if((a_old[i]==0) && (a[i]>0)) { /* add to model */ + model->supvec[model->sv_num]=docs[i]; + model->alpha[model->sv_num]=a[i]*(double)label[i]; + model->index[i]=model->sv_num; + (model->sv_num)++; + } + else if(a_old[i]==a[i]) { /* nothing to do */ + } + else { /* just update alpha */ + model->alpha[model->index[i]]=a[i]*(double)label[i]; + } + + ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a; + if((a_old[i]>=ex_c) && (a[i]at_upper_bound)--; + } + else if((a_old[i]=ex_c)) { + (model->at_upper_bound)++; + } + + if((!b_calculated) + && (a[i]>learn_parm->epsilon_a) && (a[i]b=((double)label[i]*learn_parm->eps-c[i]+lin[i]); + /* model->b=(-(double)label[i]+lin[i]); */ + b_calculated=1; + } + } + + /* No alpha in the working set not at bounds, so b was not + calculated in the usual way. The following handles this special + case. */ + if(learn_parm->biased_hyperplane + && (!b_calculated) + && (model->sv_num-1 == model->at_upper_bound)) { + first_low=1; + first_high=1; + b_low=0; + b_high=0; + for(ii=0;(i=active2dnum[ii])>=0;ii++) { + ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a; + if(a[i]0) { + b_temp=-(learn_parm->eps-c[i]+lin[i]); + if((b_temp>b_low) || (first_low)) { + b_low=b_temp; + first_low=0; + } + } + else { + b_temp=-(-learn_parm->eps-c[i]+lin[i]); + if((b_tempeps-c[i]+lin[i]); + if((b_temp>b_low) || (first_low)) { + b_low=b_temp; + first_low=0; + } + } + else { + b_temp=-(learn_parm->eps-c[i]+lin[i]); + if((b_tempb=-b_low; + } + else if(first_low) { + model->b=-b_high; + } + else { + model->b=-(b_high+b_low)/2.0; /* select b as the middle of range */ + /* C_PRINTF("\nb_low=%f, b_high=%f,b=%f\n",b_low,b_high,model->b); */ + } + } + + if(verbosity>=3) { + C_PRINTF("done\n"); C_FFLUSH(stdout); + } + + return(model->sv_num-1); /* have to substract one, since element 0 is empty*/ +} + +long check_optimality(MODEL *model, long int *label, long int *unlabeled, + double *a, double *lin, double *c, long int totdoc, + LEARN_PARM *learn_parm, double *maxdiff, + double epsilon_crit_org, long int *misclassified, + long int *inconsistent, long int *active2dnum, + long int *last_suboptimal_at, + long int iteration, KERNEL_PARM *kernel_parm) + /* Check KT-conditions */ +{ + long i,ii,retrain; + double dist,ex_c,target; + + if(kernel_parm->kernel_type == LINEAR) { /* be optimistic */ + learn_parm->epsilon_shrink=-learn_parm->epsilon_crit+epsilon_crit_org; + } + else { /* be conservative */ + learn_parm->epsilon_shrink=learn_parm->epsilon_shrink*0.7+(*maxdiff)*0.3; + } + retrain=0; + (*maxdiff)=0; + (*misclassified)=0; + for(ii=0;(i=active2dnum[ii])>=0;ii++) { + if((!inconsistent[i]) && label[i]) { + dist=(lin[i]-model->b)*(double)label[i];/* 'distance' from + 
hyperplane*/ + target=-(learn_parm->eps-(double)label[i]*c[i]); + ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a; + if(dist <= 0) { + (*misclassified)++; /* does not work due to deactivation of var */ + } + if((a[i]>learn_parm->epsilon_a) && (dist > target)) { + if((dist-target)>(*maxdiff)) /* largest violation */ + (*maxdiff)=dist-target; + } + else if((a[i](*maxdiff)) /* largest violation */ + (*maxdiff)=target-dist; + } + /* Count how long a variable was at lower/upper bound (and optimal).*/ + /* Variables, which were at the bound and optimal for a long */ + /* time are unlikely to become support vectors. In case our */ + /* cache is filled up, those variables are excluded to save */ + /* kernel evaluations. (See chapter 'Shrinking').*/ + if((a[i]>(learn_parm->epsilon_a)) + && (a[i]epsilon_a)) + && (dist < (target+learn_parm->epsilon_shrink))) { + last_suboptimal_at[i]=iteration; /* not likely optimal */ + } + else if((a[i]>=ex_c) + && (dist > (target-learn_parm->epsilon_shrink))) { + last_suboptimal_at[i]=iteration; /* not likely optimal */ + } + } + } + /* termination criterion */ + if((!retrain) && ((*maxdiff) > learn_parm->epsilon_crit)) { + retrain=1; + } + return(retrain); +} + +long check_optimality_sharedslack(DOC **docs, MODEL *model, long int *label, + double *a, double *lin, double *c, double *slack, + double *alphaslack, + long int totdoc, + LEARN_PARM *learn_parm, double *maxdiff, + double epsilon_crit_org, long int *misclassified, + long int *active2dnum, + long int *last_suboptimal_at, + long int iteration, KERNEL_PARM *kernel_parm) + /* Check KT-conditions */ +{ + long i,ii,retrain; + double dist,ex_c=0,target; + + if(kernel_parm->kernel_type == LINEAR) { /* be optimistic */ + learn_parm->epsilon_shrink=-learn_parm->epsilon_crit+epsilon_crit_org; + } + else { /* be conservative */ + learn_parm->epsilon_shrink=learn_parm->epsilon_shrink*0.7+(*maxdiff)*0.3; + } + + retrain=0; + (*maxdiff)=0; + (*misclassified)=0; + for(ii=0;(i=active2dnum[ii])>=0;ii++) { + /* 'distance' from hyperplane*/ + dist=(lin[i]-model->b)*(double)label[i]+slack[docs[i]->slackid]; + target=-(learn_parm->eps-(double)label[i]*c[i]); + ex_c=learn_parm->svm_c-learn_parm->epsilon_a; + if((a[i]>learn_parm->epsilon_a) && (dist > target)) { + if((dist-target)>(*maxdiff)) { /* largest violation */ + (*maxdiff)=dist-target; + if(verbosity>=5) C_PRINTF("sid %ld: dist=%.2f, target=%.2f, slack=%.2f, a=%f, alphaslack=%f\n",docs[i]->slackid,dist,target,slack[docs[i]->slackid],a[i],alphaslack[docs[i]->slackid]); + if(verbosity>=5) C_PRINTF(" (single %f)\n",(*maxdiff)); + } + } + if((alphaslack[docs[i]->slackid]slackid]>0)) { + if((slack[docs[i]->slackid])>(*maxdiff)) { /* largest violation */ + (*maxdiff)=slack[docs[i]->slackid]; + if(verbosity>=5) C_PRINTF("sid %ld: dist=%.2f, target=%.2f, slack=%.2f, a=%f, alphaslack=%f\n",docs[i]->slackid,dist,target,slack[docs[i]->slackid],a[i],alphaslack[docs[i]->slackid]); + if(verbosity>=5) C_PRINTF(" (joint %f)\n",(*maxdiff)); + } + } + /* Count how long a variable was at lower/upper bound (and optimal).*/ + /* Variables, which were at the bound and optimal for a long */ + /* time are unlikely to become support vectors. In case our */ + /* cache is filled up, those variables are excluded to save */ + /* kernel evaluations. 
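+     Concretely: a variable whose last_suboptimal_at stamp has not been
+     refreshed for more than learn_parm->svm_iter_to_shrink iterations
+     becomes a candidate for deactivation in shrink_problem().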
(See chapter 'Shrinking').*/ + if((a[i]>(learn_parm->epsilon_a)) + && (a[i]slackid]=iteration; /* not at bound */ + } + else if((a[i]<=(learn_parm->epsilon_a)) + && (dist < (target+learn_parm->epsilon_shrink))) { + last_suboptimal_at[docs[i]->slackid]=iteration; /* not likely optimal */ + } + else if((a[i]>=ex_c) + && (slack[docs[i]->slackid] < learn_parm->epsilon_shrink)) { + last_suboptimal_at[docs[i]->slackid]=iteration; /* not likely optimal */ + } + } + /* termination criterion */ + if((!retrain) && ((*maxdiff) > learn_parm->epsilon_crit)) { + retrain=1; + } + return(retrain); +} + +void compute_shared_slacks(DOC **docs, long int *label, + double *a, double *lin, + double *c, long int *active2dnum, + LEARN_PARM *learn_parm, + double *slack, double *alphaslack) + /* compute the value of shared slacks and the joint alphas */ +{ + long jj,i; + double dist,target; + + for(jj=0;(i=active2dnum[jj])>=0;jj++) { /* clear slack variables */ + slack[docs[i]->slackid]=0.0; + alphaslack[docs[i]->slackid]=0.0; + } + for(jj=0;(i=active2dnum[jj])>=0;jj++) { /* recompute slack variables */ + dist=(lin[i])*(double)label[i]; + target=-(learn_parm->eps-(double)label[i]*c[i]); + if((target-dist) > slack[docs[i]->slackid]) + slack[docs[i]->slackid]=target-dist; + alphaslack[docs[i]->slackid]+=a[i]; + } +} + + +long identify_inconsistent(double *a, long int *label, + long int *unlabeled, long int totdoc, + LEARN_PARM *learn_parm, + long int *inconsistentnum, long int *inconsistent) +{ + long i,retrain; + + /* Throw out examples with multipliers at upper bound. This */ + /* corresponds to the -i 1 option. */ + /* ATTENTION: this is just a heuristic for finding a close */ + /* to minimum number of examples to exclude to */ + /* make the problem separable with desired margin */ + retrain=0; + for(i=0;i=(learn_parm->svm_cost[i]-learn_parm->epsilon_a))) { + (*inconsistentnum)++; + inconsistent[i]=1; /* never choose again */ + retrain=2; /* start over */ + if(verbosity>=3) { + C_PRINTF("inconsistent(%ld)..",i); C_FFLUSH(stdout); + } + } + } + return(retrain); +} + +long identify_misclassified(double *lin, long int *label, + long int *unlabeled, long int totdoc, + MODEL *model, long int *inconsistentnum, + long int *inconsistent) +{ + long i,retrain; + double dist; + + /* Throw out misclassified examples. This */ + /* corresponds to the -i 2 option. */ + /* ATTENTION: this is just a heuristic for finding a close */ + /* to minimum number of examples to exclude to */ + /* make the problem separable with desired margin */ + retrain=0; + for(i=0;ib)*(double)label[i]; /* 'distance' from hyperplane*/ + if((!inconsistent[i]) && (!unlabeled[i]) && (dist <= 0)) { + (*inconsistentnum)++; + inconsistent[i]=1; /* never choose again */ + retrain=2; /* start over */ + if(verbosity>=3) { + C_PRINTF("inconsistent(%ld)..",i); C_FFLUSH(stdout); + } + } + } + return(retrain); +} + +long identify_one_misclassified(double *lin, long int *label, + long int *unlabeled, + long int totdoc, MODEL *model, + long int *inconsistentnum, + long int *inconsistent) +{ + long i,retrain,maxex=-1; + double dist,maxdist=0; + + /* Throw out the 'most misclassified' example. This */ + /* corresponds to the -i 3 option. 
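+     (For reference: -i 1 removes every example whose alpha sits at the
+     upper bound, -i 2 removes all misclassified examples, and -i 3,
+     implemented here, removes only the single worst violator per round.)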
*/ + /* ATTENTION: this is just a heuristic for finding a close */ + /* to minimum number of examples to exclude to */ + /* make the problem separable with desired margin */ + retrain=0; + for(i=0;ib)*(double)label[i];/* 'distance' from hyperplane*/ + if(dist=0) { + (*inconsistentnum)++; + inconsistent[maxex]=1; /* never choose again */ + retrain=2; /* start over */ + if(verbosity>=3) { + C_PRINTF("inconsistent(%ld)..",i); C_FFLUSH(stdout); + } + } + return(retrain); +} + +void update_linear_component(DOC **docs, long int *label, + long int *active2dnum, double *a, + double *a_old, long int *working2dnum, + long int totdoc, long int totwords, + KERNEL_PARM *kernel_parm, + KERNEL_CACHE *kernel_cache, + double *lin, CFLOAT *aicache, double *weights) + /* keep track of the linear component */ + /* lin of the gradient etc. by updating */ + /* based on the change of the variables */ + /* in the current working set */ +{ + register long i,ii,j,jj; + register double tec; + SVECTOR *f; + + if(kernel_parm->kernel_type==0) { /* special linear case */ + clear_vector_n(weights,totwords); + for(ii=0;(i=working2dnum[ii])>=0;ii++) { + if(a[i] != a_old[i]) { + for(f=docs[i]->fvec;f;f=f->next) + add_vector_ns(weights,f, + f->factor*((a[i]-a_old[i])*(double)label[i])); + } + } + for(jj=0;(j=active2dnum[jj])>=0;jj++) { + for(f=docs[j]->fvec;f;f=f->next) + lin[j]+=f->factor*sprod_ns(weights,f); + } + } + else { /* general case */ + for(jj=0;(i=working2dnum[jj])>=0;jj++) { + if(a[i] != a_old[i]) { + get_kernel_row(kernel_cache,docs,i,totdoc,active2dnum,aicache, + kernel_parm); + for(ii=0;(j=active2dnum[ii])>=0;ii++) { + tec=aicache[j]; + lin[j]+=(((a[i]*tec)-(a_old[i]*tec))*(double)label[i]); + } + } + } + } +} + + +long incorporate_unlabeled_examples(MODEL *model, long int *label, + long int *inconsistent, + long int *unlabeled, + double *a, double *lin, + long int totdoc, double *selcrit, + long int *select, long int *key, + long int transductcycle, + KERNEL_PARM *kernel_parm, + LEARN_PARM *learn_parm) +{ + long i,j,k,j1,j2,j3,j4,unsupaddnum1=0,unsupaddnum2=0; + long pos,neg,upos,uneg,orgpos,orgneg,nolabel,newpos,newneg,allunlab; + double dist,model_length,posratio,negratio; + long check_every=2; + double loss; + static double switchsens=0.0,switchsensorg=0.0; + double umin,umax,sumalpha; + long imin=0,imax=0; + static long switchnum=0; + + switchsens/=1.2; + + /* assumes that lin[] is up to date -> no inactive vars */ + + orgpos=0; + orgneg=0; + newpos=0; + newneg=0; + nolabel=0; + allunlab=0; + for(i=0;i 0) { + orgpos++; + } + else { + orgneg++; + } + } + else { + allunlab++; + if(unlabeled[i]) { + if(label[i] > 0) { + newpos++; + } + else if(label[i] < 0) { + newneg++; + } + } + } + if(label[i]==0) { + nolabel++; + } + } + + if(learn_parm->transduction_posratio >= 0) { + posratio=learn_parm->transduction_posratio; + } + else { + posratio=(double)orgpos/(double)(orgpos+orgneg); /* use ratio of pos/neg */ + } /* in training data */ + negratio=1.0-posratio; + + learn_parm->svm_costratio=1.0; /* global */ + if(posratio>0) { + learn_parm->svm_costratio_unlab=negratio/posratio; + } + else { + learn_parm->svm_costratio_unlab=1.0; + } + + pos=0; + neg=0; + upos=0; + uneg=0; + for(i=0;ib); /* 'distance' from hyperplane*/ + if(dist>0) { + pos++; + } + else { + neg++; + } + if(unlabeled[i]) { + if(dist>0) { + upos++; + } + else { + uneg++; + } + } + if((!unlabeled[i]) && (a[i]>(learn_parm->svm_cost[i]-learn_parm->epsilon_a))) { + /* C_PRINTF("Ubounded %ld (class %ld, unlabeled %ld)\n",i,label[i],unlabeled[i]); */ + 
} + } + if(verbosity>=2) { + C_PRINTF("POS=%ld, ORGPOS=%ld, ORGNEG=%ld\n",pos,orgpos,orgneg); + C_PRINTF("POS=%ld, NEWPOS=%ld, NEWNEG=%ld\n",pos,newpos,newneg); + C_PRINTF("pos ratio = %f (%f).\n",(double)(upos)/(double)(allunlab),posratio); + C_FFLUSH(stdout); + } + + if(transductcycle == 0) { + j1=0; + j2=0; + j4=0; + for(i=0;ib); /* 'distance' from hyperplane*/ + if((label[i]==0) && (unlabeled[i])) { + selcrit[j4]=dist; + key[j4]=i; + j4++; + } + } + unsupaddnum1=0; + unsupaddnum2=0; + select_top_n(selcrit,j4,select,(long)(allunlab*posratio+0.5)); + for(k=0;(k<(long)(allunlab*posratio+0.5));k++) { + i=key[select[k]]; + label[i]=1; + unsupaddnum1++; + j1++; + } + for(i=0;isvm_cost[i]=learn_parm->svm_c* + learn_parm->svm_costratio_unlab*learn_parm->svm_unlabbound; + } + else if(label[i] == -1) { + learn_parm->svm_cost[i]=learn_parm->svm_c* + learn_parm->svm_unlabbound; + } + } + } + if(verbosity>=1) { + /* C_PRINTF("costratio %f, costratio_unlab %f, unlabbound %f\n", + learn_parm->svm_costratio,learn_parm->svm_costratio_unlab, + learn_parm->svm_unlabbound); */ + C_PRINTF("Classifying unlabeled data as %ld POS / %ld NEG.\n", + unsupaddnum1,unsupaddnum2); + C_FFLUSH(stdout); + } + if(verbosity >= 1) + C_PRINTF("Retraining."); + if(verbosity >= 2) C_PRINTF("\n"); + return((long)3); + } + if((transductcycle % check_every) == 0) { + if(verbosity >= 1) + C_PRINTF("Retraining."); + if(verbosity >= 2) C_PRINTF("\n"); + j1=0; + j2=0; + unsupaddnum1=0; + unsupaddnum2=0; + for(i=0;isvm_cost[i]=learn_parm->svm_c* + learn_parm->svm_costratio_unlab*learn_parm->svm_unlabbound; + } + else if(label[i] == -1) { + learn_parm->svm_cost[i]=learn_parm->svm_c* + learn_parm->svm_unlabbound; + } + } + } + + if(verbosity>=2) { + /* C_PRINTF("costratio %f, costratio_unlab %f, unlabbound %f\n", + learn_parm->svm_costratio,learn_parm->svm_costratio_unlab, + learn_parm->svm_unlabbound); */ + C_PRINTF("%ld positive -> Added %ld POS / %ld NEG unlabeled examples.\n", + upos,unsupaddnum1,unsupaddnum2); + C_FFLUSH(stdout); + } + + if(learn_parm->svm_unlabbound == 1) { + learn_parm->epsilon_crit=0.001; /* do the last run right */ + } + else { + learn_parm->epsilon_crit=0.01; /* otherwise, no need to be so picky */ + } + + return((long)3); + } + else if(((transductcycle % check_every) < check_every)) { + model_length=0; + sumalpha=0; + loss=0; + for(i=0;ib); /* 'distance' from hyperplane*/ + if((label[i]*dist)<(1.0-learn_parm->epsilon_crit)) { + loss+=(1.0-(label[i]*dist))*learn_parm->svm_cost[i]; + } + } + model_length=sqrt(model_length); + if(verbosity>=2) { + C_PRINTF("Model-length = %f (%f), loss = %f, objective = %f\n", + model_length,sumalpha,loss,loss+0.5*model_length*model_length); + C_FFLUSH(stdout); + } + j1=0; + j2=0; + j3=0; + j4=0; + unsupaddnum1=0; + unsupaddnum2=0; + umin=99999; + umax=-99999; + j4=1; + while(j4) { + umin=99999; + umax=-99999; + for(i=0;(ib); + if((label[i]>0) && (unlabeled[i]) && (!inconsistent[i]) + && (distumax)) { + umax=dist; + imax=i; + } + } + if((umin < (umax+switchsens-1E-4))) { + j1++; + j2++; + unsupaddnum1++; + unlabeled[imin]=3; + inconsistent[imin]=1; + unsupaddnum2++; + unlabeled[imax]=2; + inconsistent[imax]=1; + } + else + j4=0; + j4=0; + } + for(j=0;(j0) { + unlabeled[j]=2; + } + else if(label[j]<0) { + unlabeled[j]=3; + } + /* inconsistent[j]=1; */ + j3++; + } + } + switchnum+=unsupaddnum1+unsupaddnum2; + + /* stop and print out current margin + C_PRINTF("switchnum %ld %ld\n",switchnum,kernel_parm->poly_degree); + if(switchnum == 2*kernel_parm->poly_degree) { + 
learn_parm->svm_unlabbound=1; + } + */ + + if((!unsupaddnum1) && (!unsupaddnum2)) { + if((learn_parm->svm_unlabbound>=1) && ((newpos+newneg) == allunlab)) { + for(j=0;(jpredfile,model,lin,a,unlabeled,label, + totdoc,learn_parm); + if(verbosity>=1) + C_PRINTF("Number of switches: %ld\n",switchnum); + return((long)0); + } + switchsens=switchsensorg; + learn_parm->svm_unlabbound*=1.5; + if(learn_parm->svm_unlabbound>1) { + learn_parm->svm_unlabbound=1; + } + model->at_upper_bound=0; /* since upper bound increased */ + if(verbosity>=1) + C_PRINTF("Increasing influence of unlabeled examples to %f%% .", + learn_parm->svm_unlabbound*100.0); + } + else if(verbosity>=1) { + C_PRINTF("%ld positive -> Switching labels of %ld POS / %ld NEG unlabeled examples.", + upos,unsupaddnum1,unsupaddnum2); + C_FFLUSH(stdout); + } + + if(verbosity >= 2) C_PRINTF("\n"); + + learn_parm->epsilon_crit=0.5; /* don't need to be so picky */ + + for(i=0;isvm_cost[i]=learn_parm->svm_c* + learn_parm->svm_costratio_unlab*learn_parm->svm_unlabbound; + } + else if(label[i] == -1) { + learn_parm->svm_cost[i]=learn_parm->svm_c* + learn_parm->svm_unlabbound; + } + } + } + + return((long)2); + } + + return((long)0); +} + +/*************************** Working set selection ***************************/ + +long select_next_qp_subproblem_grad(long int *label, + long int *unlabeled, + double *a, double *lin, + double *c, long int totdoc, + long int qp_size, + LEARN_PARM *learn_parm, + long int *inconsistent, + long int *active2dnum, + long int *working2dnum, + double *selcrit, + long int *select, + KERNEL_CACHE *kernel_cache, + long int cache_only, + long int *key, long int *chosen) + /* Use the feasible direction approach to select the next + qp-subproblem (see chapter 'Selecting a good working set'). If + 'cache_only' is true, then the variables are selected only among + those for which the kernel evaluations are cached. 
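+     The ranking criterion computed below is
+         selcrit[j] = y_j * (eps - y_j*c_j + y_j*lin_j),
+     i.e. the gradient along the feasible direction; the top qp_size/2
+     candidates of each of the two passes (one per direction sign) are
+     kept via select_top_n().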
*/
+{
+  long choosenum,i,j,k,activedoc,inum,valid;
+  double s;
+
+  for(inum=0;working2dnum[inum]>=0;inum++); /* find end of index */
+  choosenum=0;
+  activedoc=0;
+  for(i=0;(j=active2dnum[i])>=0;i++) {
+    s=-label[j];
+    if(kernel_cache && cache_only)
+      valid=(kernel_cache->index[j]>=0);
+    else
+      valid=1;
+    if(valid
+       && (!((a[j]<=(0+learn_parm->epsilon_a)) && (s<0)))
+       && (!((a[j]>=(learn_parm->svm_cost[j]-learn_parm->epsilon_a))
+             && (s>0)))
+       && (!chosen[j])
+       && (label[j])
+       && (!inconsistent[j]))
+      {
+      selcrit[activedoc]=(double)label[j]*(learn_parm->eps-(double)label[j]*c[j]+(double)label[j]*lin[j]);
+      /* selcrit[activedoc]=(double)label[j]*(-1.0+(double)label[j]*lin[j]); */
+      key[activedoc]=j;
+      activedoc++;
+      }
+  }
+  select_top_n(selcrit,activedoc,select,(long)(qp_size/2));
+  for(k=0;(choosenum<(qp_size/2)) && (k<(qp_size/2)) && (k<activedoc);k++) {
+    /* if(learn_parm->biased_hyperplane || (selcrit[select[k]] > 0)) { */
+      i=key[select[k]];
+      chosen[i]=1;
+      working2dnum[inum+choosenum]=i;
+      choosenum+=1;
+      if(kernel_cache)
+        kernel_cache_touch(kernel_cache,i); /* make sure it does not get
+                                               kicked out of cache */
+    /* } */
+  }
+
+  activedoc=0;
+  for(i=0;(j=active2dnum[i])>=0;i++) {
+    s=label[j];
+    if(kernel_cache && cache_only)
+      valid=(kernel_cache->index[j]>=0);
+    else
+      valid=1;
+    if(valid
+       && (!((a[j]<=(0+learn_parm->epsilon_a)) && (s<0)))
+       && (!((a[j]>=(learn_parm->svm_cost[j]-learn_parm->epsilon_a))
+             && (s>0)))
+       && (!chosen[j])
+       && (label[j])
+       && (!inconsistent[j]))
+      {
+      selcrit[activedoc]=-(double)label[j]*(learn_parm->eps-(double)label[j]*c[j]+(double)label[j]*lin[j]);
+      /* selcrit[activedoc]=-(double)(label[j]*(-1.0+(double)label[j]*lin[j])); */
+      key[activedoc]=j;
+      activedoc++;
+      }
+  }
+  select_top_n(selcrit,activedoc,select,(long)(qp_size/2));
+  for(k=0;(choosenum<qp_size) && (k<(qp_size/2)) && (k<activedoc);k++) {
+    /* if(learn_parm->biased_hyperplane || (selcrit[select[k]] > 0)) { */
+      i=key[select[k]];
+      chosen[i]=1;
+      working2dnum[inum+choosenum]=i;
+      choosenum+=1;
+      if(kernel_cache)
+        kernel_cache_touch(kernel_cache,i); /* make sure it does not get
+                                               kicked out of cache */
+    /* } */
+  }
+  working2dnum[inum+choosenum]=-1; /* complete index */
+  return(choosenum);
+}
+
+long select_next_qp_subproblem_rand(long int *label,
+                                    long int *unlabeled,
+                                    double *a, double *lin,
+                                    double *c, long int totdoc,
+                                    long int qp_size,
+                                    LEARN_PARM *learn_parm,
+                                    long int *inconsistent,
+                                    long int *active2dnum,
+                                    long int *working2dnum,
+                                    double *selcrit,
+                                    long int *select,
+                                    KERNEL_CACHE *kernel_cache,
+                                    long int *key,
+                                    long int *chosen,
+                                    long int iteration)
+/* Use the feasible direction approach to select the next
+   qp-subproblem (see section 'Selecting a good working set'). Chooses
+   a feasible direction at (pseudo) random to help jump over numerical
+   problems. 
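+   ("Random" is deterministic here: the selection criterion is simply
+   (j+iteration) % totdoc, and the callers take this branch once every
+   101 iterations via their 'iteration % 101' test.)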
*/ +{ + long choosenum,i,j,k,activedoc,inum; + double s; + + for(inum=0;working2dnum[inum]>=0;inum++); /* find end of index */ + choosenum=0; + activedoc=0; + for(i=0;(j=active2dnum[i])>=0;i++) { + s=-label[j]; + if((!((a[j]<=(0+learn_parm->epsilon_a)) && (s<0))) + && (!((a[j]>=(learn_parm->svm_cost[j]-learn_parm->epsilon_a)) + && (s>0))) + && (!inconsistent[j]) + && (label[j]) + && (!chosen[j])) { + selcrit[activedoc]=(j+iteration) % totdoc; + key[activedoc]=j; + activedoc++; + } + } + select_top_n(selcrit,activedoc,select,(long)(qp_size/2)); + for(k=0;(choosenum<(qp_size/2)) && (k<(qp_size/2)) && (k=0;i++) { + s=label[j]; + if((!((a[j]<=(0+learn_parm->epsilon_a)) && (s<0))) + && (!((a[j]>=(learn_parm->svm_cost[j]-learn_parm->epsilon_a)) + && (s>0))) + && (!inconsistent[j]) + && (label[j]) + && (!chosen[j])) { + selcrit[activedoc]=(j+iteration) % totdoc; + key[activedoc]=j; + activedoc++; + } + } + select_top_n(selcrit,activedoc,select,(long)(qp_size/2)); + for(k=0;(choosenum=0;ii++) { + ex_c=learn_parm->svm_c-learn_parm->epsilon_a; + if(alphaslack[docs[i]->slackid] >= ex_c) { + dist=(lin[i])*(double)label[i]+slack[docs[i]->slackid]; /* distance */ + target=-(learn_parm->eps-(double)label[i]*c[i]); /* rhs of constraint */ + if((a[i]>learn_parm->epsilon_a) && (dist > target)) { + if((dist-target)>maxdiff) { /* largest violation */ + maxdiff=dist-target; + maxdiffid=docs[i]->slackid; + } + } + } + } + (*maxviol)=maxdiff; + return(maxdiffid); +} + + +void select_top_n(double *selcrit, long int range, long int *select, + long int n) +{ + register long i,j; + + for(i=0;(i=0;j--) { + if((j>0) && (selcrit[select[j-1]]0) { + for(i=n;iselcrit[select[n-1]]) { + for(j=n-1;j>=0;j--) { + if((j>0) && (selcrit[select[j-1]]deactnum=0; + shrink_state->active = (long *)my_malloc(sizeof(long)*totdoc); + shrink_state->inactive_since = (long *)my_malloc(sizeof(long)*totdoc); + shrink_state->a_history = (double **)my_malloc(sizeof(double *)*maxhistory); + shrink_state->maxhistory=maxhistory; + shrink_state->last_lin = (double *)my_malloc(sizeof(double)*totdoc); + shrink_state->last_a = (double *)my_malloc(sizeof(double)*totdoc); + + for(i=0;iactive[i]=1; + shrink_state->inactive_since[i]=0; + shrink_state->last_a[i]=0; + shrink_state->last_lin[i]=0; + } +} + +void shrink_state_cleanup(SHRINK_STATE *shrink_state) +{ + free(shrink_state->active); + free(shrink_state->inactive_since); + if(shrink_state->deactnum > 0) + free(shrink_state->a_history[shrink_state->deactnum-1]); + free(shrink_state->a_history); + free(shrink_state->last_a); + free(shrink_state->last_lin); +} + +long shrink_problem(DOC **docs, + LEARN_PARM *learn_parm, + SHRINK_STATE *shrink_state, + KERNEL_PARM *kernel_parm, + long int *active2dnum, + long int *last_suboptimal_at, + long int iteration, + long int totdoc, + long int minshrink, + double *a, + long int *inconsistent) + /* Shrink some variables away. Do the shrinking only if at least + minshrink variables can be removed. 
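+     Both optimizers call this every 10th iteration with
+         minshrink = maxl(activenum/10, maxl(totdoc/500, 100)),
+     so a shrink only happens once a sizeable batch of variables has been
+     suboptimal at a bound long enough.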
*/ +{ + long i,ii,change,activenum,lastiter; + double *a_old; + + activenum=0; + change=0; + for(ii=0;active2dnum[ii]>=0;ii++) { + i=active2dnum[ii]; + activenum++; + if(learn_parm->sharedslack) + lastiter=last_suboptimal_at[docs[i]->slackid]; + else + lastiter=last_suboptimal_at[i]; + if(((iteration-lastiter) > learn_parm->svm_iter_to_shrink) + || (inconsistent[i])) { + change++; + } + } + if((change>=minshrink) /* shrink only if sufficiently many candidates */ + && (shrink_state->deactnummaxhistory)) { /* and enough memory */ + /* Shrink problem by removing those variables which are */ + /* optimal at a bound for a minimum number of iterations */ + if(verbosity>=2) { + C_PRINTF(" Shrinking..."); C_FFLUSH(stdout); + } + if(kernel_parm->kernel_type != LINEAR) { /* non-linear case save alphas */ + a_old=(double *)my_malloc(sizeof(double)*totdoc); + shrink_state->a_history[shrink_state->deactnum]=a_old; + for(i=0;i=0;ii++) { + i=active2dnum[ii]; + if(learn_parm->sharedslack) + lastiter=last_suboptimal_at[docs[i]->slackid]; + else + lastiter=last_suboptimal_at[i]; + if(((iteration-lastiter) > learn_parm->svm_iter_to_shrink) + || (inconsistent[i])) { + shrink_state->active[i]=0; + shrink_state->inactive_since[i]=shrink_state->deactnum; + } + } + activenum=compute_index(shrink_state->active,totdoc,active2dnum); + shrink_state->deactnum++; + if(kernel_parm->kernel_type == LINEAR) { + shrink_state->deactnum=0; + } + if(verbosity>=2) { + C_PRINTF("done.\n"); C_FFLUSH(stdout); + C_PRINTF(" Number of inactive variables = %ld\n",totdoc-activenum); + } + } + return(activenum); +} + + +void reactivate_inactive_examples(long int *label, + long int *unlabeled, + double *a, + SHRINK_STATE *shrink_state, + double *lin, + double *c, + long int totdoc, + long int totwords, + long int iteration, + LEARN_PARM *learn_parm, + long int *inconsistent, + DOC **docs, + KERNEL_PARM *kernel_parm, + KERNEL_CACHE *kernel_cache, + MODEL *model, + CFLOAT *aicache, + double *weights, + double *maxdiff) + /* Make all variables active again which had been removed by + shrinking. */ + /* Computes lin for those variables from scratch. 
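+     For the linear kernel this is cheap: lin[i] is refreshed from
+     shrink_state->last_lin[i] plus a dot product with the accumulated
+     weight-vector delta. For other kernels the alpha snapshots stored in
+     shrink_state->a_history are replayed, one cached kernel row per
+     changed variable.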
*/ +{ + register long i,j,ii,jj,t,*changed2dnum,*inactive2dnum; + long *changed,*inactive; + register double kernel_val,*a_old,dist; + double ex_c,target; + SVECTOR *f; + + if(kernel_parm->kernel_type == LINEAR) { /* special linear case */ + a_old=shrink_state->last_a; + clear_vector_n(weights,totwords); + for(i=0;ifvec;f;f=f->next) + add_vector_ns(weights,f, + f->factor*((a[i]-a_old[i])*(double)label[i])); + a_old[i]=a[i]; + } + } + for(i=0;iactive[i]) { + for(f=docs[i]->fvec;f;f=f->next) + lin[i]=shrink_state->last_lin[i]+f->factor*sprod_ns(weights,f); + } + shrink_state->last_lin[i]=lin[i]; + } + } + else { + changed=(long *)my_malloc(sizeof(long)*totdoc); + changed2dnum=(long *)my_malloc(sizeof(long)*(totdoc+11)); + inactive=(long *)my_malloc(sizeof(long)*totdoc); + inactive2dnum=(long *)my_malloc(sizeof(long)*(totdoc+11)); + for(t=shrink_state->deactnum-1;(t>=0) && shrink_state->a_history[t];t--) { + if(verbosity>=2) { + C_PRINTF("%ld..",t); C_FFLUSH(stdout); + } + a_old=shrink_state->a_history[t]; + for(i=0;iactive[i]) + && (shrink_state->inactive_since[i] == t)); + changed[i]= (a[i] != a_old[i]); + } + compute_index(inactive,totdoc,inactive2dnum); + compute_index(changed,totdoc,changed2dnum); + + for(ii=0;(i=changed2dnum[ii])>=0;ii++) { + get_kernel_row(kernel_cache,docs,i,totdoc,inactive2dnum,aicache, + kernel_parm); + for(jj=0;(j=inactive2dnum[jj])>=0;jj++) { + kernel_val=aicache[j]; + lin[j]+=(((a[i]*kernel_val)-(a_old[i]*kernel_val))*(double)label[i]); + } + } + } + free(changed); + free(changed2dnum); + free(inactive); + free(inactive2dnum); + } + (*maxdiff)=0; + for(i=0;iinactive_since[i]=shrink_state->deactnum-1; + if(!inconsistent[i]) { + dist=(lin[i]-model->b)*(double)label[i]; + target=-(learn_parm->eps-(double)label[i]*c[i]); + ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a; + if((a[i]>learn_parm->epsilon_a) && (dist > target)) { + if((dist-target)>(*maxdiff)) /* largest violation */ + (*maxdiff)=dist-target; + } + else if((a[i](*maxdiff)) /* largest violation */ + (*maxdiff)=target-dist; + } + if((a[i]>(0+learn_parm->epsilon_a)) + && (a[i]active[i]=1; /* not at bound */ + } + else if((a[i]<=(0+learn_parm->epsilon_a)) && (dist < (target+learn_parm->epsilon_shrink))) { + shrink_state->active[i]=1; + } + else if((a[i]>=ex_c) + && (dist > (target-learn_parm->epsilon_shrink))) { + shrink_state->active[i]=1; + } + else if(learn_parm->sharedslack) { /* make all active when sharedslack */ + shrink_state->active[i]=1; + } + } + } + if(kernel_parm->kernel_type != LINEAR) { /* update history for non-linear */ + for(i=0;ia_history[shrink_state->deactnum-1])[i]=a[i]; + } + for(t=shrink_state->deactnum-2;(t>=0) && shrink_state->a_history[t];t--) { + free(shrink_state->a_history[t]); + shrink_state->a_history[t]=0; + } + } +} + +/****************************** Cache handling *******************************/ + +void get_kernel_row(KERNEL_CACHE *kernel_cache, DOC **docs, + long int docnum, long int totdoc, + long int *active2dnum, CFLOAT *buffer, + KERNEL_PARM *kernel_parm) + /* Get's a row of the matrix of kernel values This matrix has the + same form as the Hessian, just that the elements are not + multiplied by */ + /* y_i * y_j * a_i * a_j */ + /* Takes the values from the cache if available. */ +{ + register long i,j,start; + DOC *ex; + + ex=docs[docnum]; + + if(kernel_cache->index[docnum] != -1) { /* row is cached? 
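+                                              (index[docnum] gives the row
+                                              slot in 'buffer'; each row has
+                                              'activenum' columns addressed
+                                              through totdoc2active[j])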
*/ + kernel_cache->lru[kernel_cache->index[docnum]]=kernel_cache->time; /* lru */ + start=kernel_cache->activenum*kernel_cache->index[docnum]; + for(i=0;(j=active2dnum[i])>=0;i++) { + if(kernel_cache->totdoc2active[j] >= 0) { /* column is cached? */ + buffer[j]=kernel_cache->buffer[start+kernel_cache->totdoc2active[j]]; + } + else { + buffer[j]=(CFLOAT)kernel(kernel_parm,ex,docs[j]); + } + } + } + else { + for(i=0;(j=active2dnum[i])>=0;i++) { + buffer[j]=(CFLOAT)kernel(kernel_parm,ex,docs[j]); + } + } +} + + +void cache_kernel_row(KERNEL_CACHE *kernel_cache, DOC **docs, + long int m, KERNEL_PARM *kernel_parm) + /* Fills cache for the row m */ +{ + register DOC *ex; + register long j,k,l; + register CFLOAT *cache; + + if(!kernel_cache_check(kernel_cache,m)) { /* not cached yet*/ + cache = kernel_cache_clean_and_malloc(kernel_cache,m); + if(cache) { + l=kernel_cache->totdoc2active[m]; + ex=docs[m]; + for(j=0;jactivenum;j++) { /* fill cache */ + k=kernel_cache->active2totdoc[j]; + if((kernel_cache->index[k] != -1) && (l != -1) && (k != m)) { + cache[j]=kernel_cache->buffer[kernel_cache->activenum + *kernel_cache->index[k]+l]; + } + else { + cache[j]=kernel(kernel_parm,ex,docs[k]); + } + } + } + else { + perror("Error: Kernel cache full! => increase cache size"); + } + } +} + + +void cache_multiple_kernel_rows(KERNEL_CACHE *kernel_cache, DOC **docs, + long int *key, long int varnum, + KERNEL_PARM *kernel_parm) + /* Fills cache for the rows in key */ +{ + register long i; + + for(i=0;i=2) { + C_PRINTF(" Reorganizing cache..."); C_FFLUSH(stdout); + } + + keep=(long *)my_malloc(sizeof(long)*totdoc); + for(j=0;jactivenum) && (scountactive2totdoc[jj]; + if(!after[j]) { + scount++; + keep[j]=0; + } + } + + for(i=0;imax_elems;i++) { + for(jj=0;jjactivenum;jj++) { + j=kernel_cache->active2totdoc[jj]; + if(!keep[j]) { + from++; + } + else { + kernel_cache->buffer[to]=kernel_cache->buffer[from]; + to++; + from++; + } + } + } + + kernel_cache->activenum=0; + for(j=0;jtotdoc2active[j] != -1)) { + kernel_cache->active2totdoc[kernel_cache->activenum]=j; + kernel_cache->totdoc2active[j]=kernel_cache->activenum; + kernel_cache->activenum++; + } + else { + kernel_cache->totdoc2active[j]=-1; + } + } + + kernel_cache->max_elems=(long)(kernel_cache->buffsize/kernel_cache->activenum); + if(kernel_cache->max_elems>totdoc) { + kernel_cache->max_elems=totdoc; + } + + free(keep); + + if(verbosity>=2) { + C_PRINTF("done.\n"); C_FFLUSH(stdout); + C_PRINTF(" Cache-size in rows = %ld\n",kernel_cache->max_elems); + } +} + +KERNEL_CACHE *kernel_cache_init(long int totdoc, long int buffsize) +{ + long i; + KERNEL_CACHE *kernel_cache; + + kernel_cache=(KERNEL_CACHE *)my_malloc(sizeof(KERNEL_CACHE)); + kernel_cache->index = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->occu = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->lru = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->invindex = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->active2totdoc = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->totdoc2active = (long *)my_malloc(sizeof(long)*totdoc); + kernel_cache->buffer = (CFLOAT *)my_malloc((size_t)(buffsize)*1024*1024); + + kernel_cache->buffsize=(long)(buffsize/sizeof(CFLOAT)*1024*1024); + + kernel_cache->max_elems=(long)(kernel_cache->buffsize/totdoc); + if(kernel_cache->max_elems>totdoc) { + kernel_cache->max_elems=totdoc; + } + + if(verbosity>=2) { + C_PRINTF(" Cache-size in rows = %ld\n",kernel_cache->max_elems); + C_PRINTF(" Kernel evals so far: 
%ld\n",kernel_cache_statistic); + } + + kernel_cache->elems=0; /* initialize cache */ + for(i=0;iindex[i]=-1; + kernel_cache->lru[i]=0; + } + for(i=0;ioccu[i]=0; + kernel_cache->invindex[i]=-1; + } + + kernel_cache->activenum=totdoc;; + for(i=0;iactive2totdoc[i]=i; + kernel_cache->totdoc2active[i]=i; + } + + kernel_cache->time=0; + + return(kernel_cache); +} + +void kernel_cache_reset_lru(KERNEL_CACHE *kernel_cache) +{ + long maxlru=0,k; + + for(k=0;kmax_elems;k++) { + if(maxlru < kernel_cache->lru[k]) + maxlru=kernel_cache->lru[k]; + } + for(k=0;kmax_elems;k++) { + kernel_cache->lru[k]-=maxlru; + } +} + +void kernel_cache_cleanup(KERNEL_CACHE *kernel_cache) +{ + free(kernel_cache->index); + free(kernel_cache->occu); + free(kernel_cache->lru); + free(kernel_cache->invindex); + free(kernel_cache->active2totdoc); + free(kernel_cache->totdoc2active); + free(kernel_cache->buffer); + free(kernel_cache); +} + +long kernel_cache_malloc(KERNEL_CACHE *kernel_cache) +{ + long i; + + if(kernel_cache_space_available(kernel_cache)) { + for(i=0;imax_elems;i++) { + if(!kernel_cache->occu[i]) { + kernel_cache->occu[i]=1; + kernel_cache->elems++; + return(i); + } + } + } + return(-1); +} + +void kernel_cache_free(KERNEL_CACHE *kernel_cache, long int i) +{ + kernel_cache->occu[i]=0; + kernel_cache->elems--; +} + +long kernel_cache_free_lru(KERNEL_CACHE *kernel_cache) + /* remove least recently used cache element */ +{ + register long k,least_elem=-1,least_time; + + least_time=kernel_cache->time+1; + for(k=0;kmax_elems;k++) { + if(kernel_cache->invindex[k] != -1) { + if(kernel_cache->lru[k]lru[k]; + least_elem=k; + } + } + } + if(least_elem != -1) { + kernel_cache_free(kernel_cache,least_elem); + kernel_cache->index[kernel_cache->invindex[least_elem]]=-1; + kernel_cache->invindex[least_elem]=-1; + return(1); + } + return(0); +} + + +CFLOAT *kernel_cache_clean_and_malloc(KERNEL_CACHE *kernel_cache, + long int docnum) + /* Get a free cache entry. In case cache is full, the lru element + is removed. */ +{ + long result; + if((result = kernel_cache_malloc(kernel_cache)) == -1) { + if(kernel_cache_free_lru(kernel_cache)) { + result = kernel_cache_malloc(kernel_cache); + } + } + kernel_cache->index[docnum]=result; + if(result == -1) { + return(0); + } + kernel_cache->invindex[result]=docnum; + kernel_cache->lru[kernel_cache->index[docnum]]=kernel_cache->time; /* lru */ + return((CFLOAT *)((intptr_t)kernel_cache->buffer + +(kernel_cache->activenum*sizeof(CFLOAT)* + kernel_cache->index[docnum]))); +} + +long kernel_cache_touch(KERNEL_CACHE *kernel_cache, long int docnum) + /* Update lru time to avoid removal from cache. */ +{ + if(kernel_cache && kernel_cache->index[docnum] != -1) { + kernel_cache->lru[kernel_cache->index[docnum]]=kernel_cache->time; /* lru */ + return(1); + } + return(0); +} + +long kernel_cache_check(KERNEL_CACHE *kernel_cache, long int docnum) + /* Is that row cached? */ +{ + return(kernel_cache->index[docnum] != -1); +} + +long kernel_cache_space_available(KERNEL_CACHE *kernel_cache) + /* Is there room for one more row? */ +{ + return(kernel_cache->elems < kernel_cache->max_elems); +} + +/************************** Compute estimates ******************************/ + +void compute_xa_estimates(MODEL *model, long int *label, + long int *unlabeled, long int totdoc, + DOC **docs, double *lin, double *a, + KERNEL_PARM *kernel_parm, + LEARN_PARM *learn_parm, double *error, + double *recall, double *precision) + /* Computes xa-estimate of error rate, recall, and precision. See + T. 
+void compute_xa_estimates(MODEL *model, long int *label, + long int *unlabeled, long int totdoc, + DOC **docs, double *lin, double *a, + KERNEL_PARM *kernel_parm, + LEARN_PARM *learn_parm, double *error, + double *recall, double *precision) + /* Computes xa-estimate of error rate, recall, and precision. See + T. Joachims, Estimating the Generalization Performance of an SVM + Efficiently, ICML, 2000. */ +{ + long i,looerror,looposerror,loonegerror; + long totex,totposex; + double xi,r_delta,r_delta_sq,sim=0; + long *sv2dnum=NULL,*sv=NULL,svnum; + + r_delta=estimate_r_delta(docs,totdoc,kernel_parm); + r_delta_sq=r_delta*r_delta; + + looerror=0; + looposerror=0; + loonegerror=0; + totex=0; + totposex=0; + svnum=0; + + if(learn_parm->xa_depth > 0) { + sv = (long *)my_malloc(sizeof(long)*(totdoc+11)); + for(i=0;i<totdoc;i++) + sv[i]=0; + for(i=1;i<model->sv_num;i++) + if(a[model->supvec[i]->docnum] + < (learn_parm->svm_cost[model->supvec[i]->docnum] + -learn_parm->epsilon_a)) { + sv[model->supvec[i]->docnum]=1; + svnum++; + } + sv2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11)); + clear_index(sv2dnum); + compute_index(sv,totdoc,sv2dnum); + } + + for(i=0;i<totdoc;i++) { + if(unlabeled[i]) { + /* ignore it */ + } + else { + xi=1.0-((lin[i]-model->b)*(double)label[i]); + if(xi<0) xi=0; + if(label[i]>0) { + totposex++; + } + if((learn_parm->rho*a[i]*r_delta_sq+xi) >= 1.0) { + if(learn_parm->xa_depth > 0) { /* makes assumptions */ + sim=distribute_alpha_t_greedily(sv2dnum,svnum,docs,a,i,label, + kernel_parm,learn_parm, + (double)((1.0-xi-a[i]*r_delta_sq)/(2.0*a[i]))); + } + if((learn_parm->xa_depth == 0) || + ((a[i]*kernel(kernel_parm,docs[i],docs[i])+a[i]*2.0*sim+xi) >= 1.0)) { + looerror++; + if(label[i]>0) { + looposerror++; + } + else { + loonegerror++; + } + } + } + totex++; + } + } + + (*error)=((double)looerror/(double)totex)*100.0; + (*recall)=(1.0-(double)looposerror/(double)totposex)*100.0; + (*precision)=(((double)totposex-(double)looposerror) + /((double)totposex-(double)looposerror+(double)loonegerror))*100.0; + + free(sv); + free(sv2dnum); +} + + +double distribute_alpha_t_greedily(long int *sv2dnum, long int svnum, + DOC **docs, double *a, + long int docnum, + long int *label, + KERNEL_PARM *kernel_parm, + LEARN_PARM *learn_parm, double thresh) + /* Experimental code improving plain XiAlpha estimates by + computing a better bound using a greedy optimization strategy. */ +{ + long best_depth=0; + long i,j,k,d,skip,allskip; + double best,best_val[101],val,init_val_sq,init_val_lin; + long best_ex[101]; + CFLOAT *cache,*trow; + + cache=(CFLOAT *)my_malloc(sizeof(CFLOAT)*learn_parm->xa_depth*svnum); + trow = (CFLOAT *)my_malloc(sizeof(CFLOAT)*svnum); + + for(k=0;k<svnum;k++) { + trow[k]=kernel(kernel_parm,docs[docnum],docs[sv2dnum[k]]); + } + + init_val_sq=0; + init_val_lin=0; + best=0; + + for(d=0;d<learn_parm->xa_depth;d++) { + allskip=1; + if(d>=1) { + init_val_sq+=cache[best_ex[d-1]+svnum*(d-1)]; + for(k=0;k<d-1;k++) { + init_val_sq+=2.0*cache[best_ex[k]+svnum*(d-1)]; + } + init_val_lin+=trow[best_ex[d-1]]; + } + for(i=0;i<svnum;i++) { + skip=0; + if(sv2dnum[i] == docnum) skip=1; + for(j=0;j<d;j++) { + if(i == best_ex[j]) skip=1; + } + + if(!skip) { + val=init_val_sq; + if(kernel_parm->kernel_type == LINEAR) + val+=docs[sv2dnum[i]]->fvec->twonorm_sq; + else + val+=kernel(kernel_parm,docs[sv2dnum[i]],docs[sv2dnum[i]]); + for(j=0;j<d;j++) { + val+=2.0*cache[i+j*svnum]; + } + val*=(1.0/(2.0*(d+1)*(d+1))); + val+=((init_val_lin+trow[i])/(d+1)); + + if(allskip || (val < best_val[d])) { + best_val[d]=val; + best_ex[d]=i; + } + allskip=0; + if(val < thresh) { + i=svnum; + } + } + } + if(!allskip) { + for(j=0;j<svnum;j++) { + cache[d*svnum+j]=kernel(kernel_parm,docs[sv2dnum[best_ex[d]]],docs[sv2dnum[j]]); + } + } + if((!allskip) && ((best_val[d] < best) || (d == 0))) { + best=best_val[d]; + best_depth=d; + } + if(allskip || (best < thresh)) { + d=learn_parm->xa_depth; + } + } + + free(cache); + free(trow); + + /* C_PRINTF("Distribute[%ld](%ld)=%f, ",docnum,best_depth,best); */ + return(best); +} + + +void estimate_transduction_quality(MODEL *model, long int *label, + long int *unlabeled, + long int totdoc, DOC **docs, double *lin) + /* Loo-bound based on observation that loo-errors must have an + equal distribution in both training and test examples, given + that the test examples are classified correctly. Compare + chapter "Constraints on the Transductive Hyperplane" in my + Dissertation. */
+{ + long i,j,l=0,ulab=0,lab=0,labpos=0,labneg=0,ulabpos=0,ulabneg=0,totulab=0; + double totlab=0,totlabpos=0,totlabneg=0,labsum=0,ulabsum=0; + double r_delta,r_delta_sq,xi,xisum=0,asum=0; + + r_delta=estimate_r_delta(docs,totdoc,&(model->kernel_parm)); + r_delta_sq=r_delta*r_delta; + + for(j=0;j<totdoc;j++) { + if(unlabeled[j]) { + totulab++; + } + else { + totlab++; + if(label[j] > 0) + totlabpos++; + else + totlabneg++; + } + } + for(j=1;j<model->sv_num;j++) { + i=model->supvec[j]->docnum; + xi=1.0-((lin[i]-model->b)*(double)label[i]); + if(xi<0) xi=0; + + xisum+=xi; + asum+=fabs(model->alpha[j]); + if(unlabeled[i]) { + ulabsum+=(fabs(model->alpha[j])*r_delta_sq+xi); + } + else { + labsum+=(fabs(model->alpha[j])*r_delta_sq+xi); + } + if((fabs(model->alpha[j])*r_delta_sq+xi) >= 1) { + l++; + if(unlabeled[model->supvec[j]->docnum]) { + ulab++; + if(model->alpha[j] > 0) + ulabpos++; + else + ulabneg++; + } + else { + lab++; + if(model->alpha[j] > 0) + labpos++; + else + labneg++; + } + } + } + C_PRINTF("xacrit>=1: labeledpos=%.5f labeledneg=%.5f default=%.5f\n",(double)labpos/(double)totlab*100.0,(double)labneg/(double)totlab*100.0,(double)totlabpos/(double)(totlab)*100.0); + C_PRINTF("xacrit>=1: unlabelpos=%.5f unlabelneg=%.5f\n",(double)ulabpos/(double)totulab*100.0,(double)ulabneg/(double)totulab*100.0); + C_PRINTF("xacrit>=1: labeled=%.5f unlabeled=%.5f all=%.5f\n",(double)lab/(double)totlab*100.0,(double)ulab/(double)totulab*100.0,(double)l/(double)(totdoc)*100.0); + C_PRINTF("xacritsum: labeled=%.5f unlabeled=%.5f all=%.5f\n",(double)labsum/(double)totlab*100.0,(double)ulabsum/(double)totulab*100.0,(double)(labsum+ulabsum)/(double)(totdoc)*100.0); + C_PRINTF("r_delta_sq=%.5f xisum=%.5f asum=%.5f\n",r_delta_sq,xisum,asum); +} + +double estimate_margin_vcdim(MODEL *model, double w, double R, + KERNEL_PARM *kernel_parm) + /* w: length of model vector in feature space (pass w<0 to compute it) */ + /* R: radius of ball containing the data (pass R<0 to estimate it) */ +{ + double h; + + /* follows chapter 5.6.4 in [Vapnik/95] */ + + if(w<0) { + w=model_length_s(model,kernel_parm); + } + if(R<0) { + R=estimate_sphere(model,kernel_parm); + } + h = w*w * R*R +1; + return(h); +} + +double estimate_sphere(MODEL *model, KERNEL_PARM *kernel_parm) + /* Approximates the radius of the ball containing */ + /* the support vectors by bounding it with the */ +{ /* length of the longest support vector. This is */ + register long j; /* pretty good for text categorization, since all */ + double xlen,maxxlen=0; /* documents have feature vectors of length 1. It */ + DOC *nulldoc; /* assumes that the center of the ball is at the */ + WORD nullword; /* origin of the space. */ + + nullword.wnum=0; + nulldoc=create_example(-2,0,0,0.0,create_svector(&nullword,"",1.0)); + + for(j=1;j<model->sv_num;j++) { + xlen=sqrt(kernel(kernel_parm,model->supvec[j],model->supvec[j]) + -2*kernel(kernel_parm,model->supvec[j],nulldoc) + +kernel(kernel_parm,nulldoc,nulldoc)); + if(xlen>maxxlen) { + maxxlen=xlen; + } + } + + free_example(nulldoc,1); + return(maxxlen); +}
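+ +/* Note on the "nulldoc" trick used by estimate_sphere() above and by the r_delta estimators below: with an artificial all-zero example 0, the feature-space length of x relative to the origin is sqrt(K(x,x) - 2*K(x,0) + K(0,0)), so ball radii can be estimated from kernel evaluations alone; estimate_r_delta() takes the maximum such length, estimate_r_delta_average() the mean. */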
+ +double estimate_r_delta(DOC **docs, long int totdoc, KERNEL_PARM *kernel_parm) +{ + long i; + double maxxlen,xlen; + DOC *nulldoc; /* assumes that the center of the ball is at the */ + WORD nullword; /* origin of the space. */ + + nullword.wnum=0; + nulldoc=create_example(-2,0,0,0.0,create_svector(&nullword,"",1.0)); + + maxxlen=0; + for(i=0;i<totdoc;i++) { + xlen=sqrt(kernel(kernel_parm,docs[i],docs[i]) + -2*kernel(kernel_parm,docs[i],nulldoc) + +kernel(kernel_parm,nulldoc,nulldoc)); + if(xlen>maxxlen) { + maxxlen=xlen; + } + } + + free_example(nulldoc,1); + return(maxxlen); +} + +double estimate_r_delta_average(DOC **docs, long int totdoc, + KERNEL_PARM *kernel_parm) +{ + long i; + double avgxlen; + DOC *nulldoc; /* assumes that the center of the ball is at the */ + WORD nullword; /* origin of the space. */ + + nullword.wnum=0; + nulldoc=create_example(-2,0,0,0.0,create_svector(&nullword,"",1.0)); + + avgxlen=0; + for(i=0;i<totdoc;i++) { + avgxlen+=sqrt(kernel(kernel_parm,docs[i],docs[i]) + -2*kernel(kernel_parm,docs[i],nulldoc) + +kernel(kernel_parm,nulldoc,nulldoc)); + } + + free_example(nulldoc,1); + return(avgxlen/totdoc); +} + +double length_of_longest_document_vector(DOC **docs, long int totdoc, + KERNEL_PARM *kernel_parm) +{ + long i; + double maxxlen,xlen; + + maxxlen=0; + for(i=0;i<totdoc;i++) { + xlen=sqrt(kernel(kernel_parm,docs[i],docs[i])); + if(xlen>maxxlen) { + maxxlen=xlen; + } + } + + return(maxxlen); +} + +/****************************** IO-handling **********************************/ + +void write_prediction(char *predfile, MODEL *model, double *lin, + double *a, long int *unlabeled, + long int *label, long int totdoc, + LEARN_PARM *learn_parm) +{ + FILE *predfl; + long i; + double dist,a_max; + + if(verbosity>=1) { + C_PRINTF("Writing prediction file..."); C_FFLUSH(stdout); + } + if ((predfl = fopen (predfile, "w")) == NULL) + { perror (predfile); C_EXIT (1); } + a_max=learn_parm->epsilon_a; + for(i=0;i<totdoc;i++) { + if((unlabeled[i]) && (a[i]>a_max)) { + a_max=a[i]; + } + } + for(i=0;i<totdoc;i++) { + if(unlabeled[i]) { + if((a[i]>(learn_parm->epsilon_a))) { + dist=(double)label[i]*(1.0-learn_parm->epsilon_crit-a[i]/(a_max*2.0)); + } + else { + dist=(lin[i]-model->b); + } + if(dist>0) { + C_FPRINTF(predfl,"%.8g:+1 %.8g:-1\n",dist,-dist); + } + else { + C_FPRINTF(predfl,"%.8g:-1 %.8g:+1\n",-dist,dist); + } + } + } + fclose(predfl); + if(verbosity>=1) { + C_PRINTF("done\n"); + } +} + +void write_alphas(char *alphafile, double *a, + long int *label, long int totdoc) +{ + FILE *alphafl; + long i; + + if(verbosity>=1) { + C_PRINTF("Writing alpha file..."); C_FFLUSH(stdout); + } + if ((alphafl = fopen (alphafile, "w")) == NULL) + { perror (alphafile); C_EXIT (1); } + for(i=0;i<totdoc;i++) { + C_FPRINTF(alphafl,"%.18g\n",a[i]*(double)label[i]); + } + fclose(alphafl); + if(verbosity>=1) { + C_PRINTF("done\n"); + } +} + diff --git a/src/symbols.rds b/src/symbols.rds deleted file mode 100755 index c73b1b86..00000000 Binary files a/src/symbols.rds and /dev/null differ diff --git a/src/utils/threading.cpp b/src/utils/threading.cpp new file mode 100644 index 00000000..c07e3b0b --- /dev/null +++ b/src/utils/threading.cpp @@ -0,0 +1,9 @@ +#include "threading.h" + +namespace gmum { + +void sleep(int ms) { + tthread::this_thread::sleep_for(tthread::chrono::milliseconds(ms)); +} + +} diff --git a/src/utils/tinythread.cpp b/src/utils/tinythread.cpp new file mode 100644 index 00000000..690eceea --- /dev/null +++ b/src/utils/tinythread.cpp @@ -0,0 +1,303 @@ +/* -*- mode: c++; tab-width: 2; indent-tabs-mode: nil; -*- +Copyright (c) 2010-2012 Marcus Geelnard + +This software is provided 'as-is', without any express or implied +warranty. In no event will the authors be held liable for any damages +arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, +including commercial applications, and to alter it and redistribute it +freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source + distribution.
+*/ + +#include <exception> +#include "tinythread.h" + +#if defined(_TTHREAD_POSIX_) + #include <unistd.h> + #include <map> +#elif defined(_TTHREAD_WIN32_) + #include <process.h> +#endif + + +namespace tthread { + +//------------------------------------------------------------------------------ +// condition_variable +//------------------------------------------------------------------------------ +// NOTE 1: The Win32 implementation of the condition_variable class is based on +// the corresponding implementation in GLFW, which in turn is based on a +// description by Douglas C. Schmidt and Irfan Pyarali: +// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html +// +// NOTE 2: Windows Vista actually has native support for condition variables +// (InitializeConditionVariable, WakeConditionVariable, etc), but we want to +// be portable with pre-Vista Windows versions, so TinyThread++ does not use +// Vista condition variables. +//------------------------------------------------------------------------------ + +#if defined(_TTHREAD_WIN32_) + #define _CONDITION_EVENT_ONE 0 + #define _CONDITION_EVENT_ALL 1 +#endif + +#if defined(_TTHREAD_WIN32_) +condition_variable::condition_variable() : mWaitersCount(0) +{ + mEvents[_CONDITION_EVENT_ONE] = CreateEvent(NULL, FALSE, FALSE, NULL); + mEvents[_CONDITION_EVENT_ALL] = CreateEvent(NULL, TRUE, FALSE, NULL); + InitializeCriticalSection(&mWaitersCountLock); +} +#endif + +#if defined(_TTHREAD_WIN32_) +condition_variable::~condition_variable() +{ + CloseHandle(mEvents[_CONDITION_EVENT_ONE]); + CloseHandle(mEvents[_CONDITION_EVENT_ALL]); + DeleteCriticalSection(&mWaitersCountLock); +} +#endif + +#if defined(_TTHREAD_WIN32_) +void condition_variable::_wait() +{ + // Wait for either event to become signaled due to notify_one() or + // notify_all() being called + int result = WaitForMultipleObjects(2, mEvents, FALSE, INFINITE); + + // Check if we are the last waiter + EnterCriticalSection(&mWaitersCountLock); + -- mWaitersCount; + bool lastWaiter = (result == (WAIT_OBJECT_0 + _CONDITION_EVENT_ALL)) && + (mWaitersCount == 0); + LeaveCriticalSection(&mWaitersCountLock); + + // If we are the last waiter to be notified to stop waiting, reset the event + if(lastWaiter) + ResetEvent(mEvents[_CONDITION_EVENT_ALL]); +} +#endif + +#if defined(_TTHREAD_WIN32_) +void condition_variable::notify_one() +{ + // Are there any waiters? + EnterCriticalSection(&mWaitersCountLock); + bool haveWaiters = (mWaitersCount > 0); + LeaveCriticalSection(&mWaitersCountLock); + + // If we have any waiting threads, send them a signal + if(haveWaiters) + SetEvent(mEvents[_CONDITION_EVENT_ONE]); +} +#endif + +#if defined(_TTHREAD_WIN32_) +void condition_variable::notify_all() +{ + // Are there any waiters? + EnterCriticalSection(&mWaitersCountLock); + bool haveWaiters = (mWaitersCount > 0); + LeaveCriticalSection(&mWaitersCountLock); + + // If we have any waiting threads, send them a signal + if(haveWaiters) + SetEvent(mEvents[_CONDITION_EVENT_ALL]); +} +#endif + + +//------------------------------------------------------------------------------ +// POSIX pthread_t to unique thread::id mapping logic. +// Note: Here we use a global thread safe std::map to convert instances of +// pthread_t to small thread identifier numbers (unique within one process). +// This method should be portable across different POSIX implementations.
+//------------------------------------------------------------------------------ + +#if defined(_TTHREAD_POSIX_) +static thread::id _pthread_t_to_ID(const pthread_t &aHandle) +{ + static mutex idMapLock; + static std::map<pthread_t, unsigned long int> idMap; + static unsigned long int idCount(1); + + lock_guard<mutex> guard(idMapLock); + if(idMap.find(aHandle) == idMap.end()) + idMap[aHandle] = idCount ++; + return thread::id(idMap[aHandle]); +} +#endif // _TTHREAD_POSIX_ + + +//------------------------------------------------------------------------------ +// thread +//------------------------------------------------------------------------------ + +/// Information to pass to the new thread (what to run). +struct _thread_start_info { + void (*mFunction)(void *); ///< Pointer to the function to be executed. + void * mArg; ///< Function argument for the thread function. + thread * mThread; ///< Pointer to the thread object. +}; + +// Thread wrapper function. +#if defined(_TTHREAD_WIN32_) +unsigned WINAPI thread::wrapper_function(void * aArg) +#elif defined(_TTHREAD_POSIX_) +void * thread::wrapper_function(void * aArg) +#endif +{ + // Get thread startup information + _thread_start_info * ti = (_thread_start_info *) aArg; + + try + { + // Call the actual client thread function + ti->mFunction(ti->mArg); + } + catch(...) + { + // Uncaught exceptions will terminate the application (default behavior + // according to C++11) + std::terminate(); + } + + // The thread is no longer executing + lock_guard<mutex> guard(ti->mThread->mDataMutex); + ti->mThread->mNotAThread = true; + + // The thread is responsible for freeing the startup information + delete ti; + + return 0; +} + +thread::thread(void (*aFunction)(void *), void * aArg) +{ + // Serialize access to this thread structure + lock_guard<mutex> guard(mDataMutex); + + // Fill out the thread startup information (passed to the thread wrapper, + // which will eventually free it) + _thread_start_info * ti = new _thread_start_info; + ti->mFunction = aFunction; + ti->mArg = aArg; + ti->mThread = this; + + // The thread is now alive + mNotAThread = false; + + // Create the thread +#if defined(_TTHREAD_WIN32_) + mHandle = (HANDLE) _beginthreadex(0, 0, wrapper_function, (void *) ti, 0, &mWin32ThreadID); +#elif defined(_TTHREAD_POSIX_) + if(pthread_create(&mHandle, NULL, wrapper_function, (void *) ti) != 0) + mHandle = 0; +#endif + + // Did we fail to create the thread?
+ if(!mHandle) + { + mNotAThread = true; + delete ti; + } +} + +thread::~thread() +{ + if(joinable()) + std::terminate(); +} + +void thread::join() +{ + if(joinable()) + { +#if defined(_TTHREAD_WIN32_) + WaitForSingleObject(mHandle, INFINITE); + CloseHandle(mHandle); +#elif defined(_TTHREAD_POSIX_) + pthread_join(mHandle, NULL); +#endif + } +} + +bool thread::joinable() const +{ + mDataMutex.lock(); + bool result = !mNotAThread; + mDataMutex.unlock(); + return result; +} + +void thread::detach() +{ + mDataMutex.lock(); + if(!mNotAThread) + { +#if defined(_TTHREAD_WIN32_) + CloseHandle(mHandle); +#elif defined(_TTHREAD_POSIX_) + pthread_detach(mHandle); +#endif + mNotAThread = true; + } + mDataMutex.unlock(); +} + +thread::id thread::get_id() const +{ + if(!joinable()) + return id(); +#if defined(_TTHREAD_WIN32_) + return id((unsigned long int) mWin32ThreadID); +#elif defined(_TTHREAD_POSIX_) + return _pthread_t_to_ID(mHandle); +#endif +} + +unsigned thread::hardware_concurrency() +{ +#if defined(_TTHREAD_WIN32_) + SYSTEM_INFO si; + GetSystemInfo(&si); + return (int) si.dwNumberOfProcessors; +#elif defined(_SC_NPROCESSORS_ONLN) + return (int) sysconf(_SC_NPROCESSORS_ONLN); +#elif defined(_SC_NPROC_ONLN) + return (int) sysconf(_SC_NPROC_ONLN); +#else + // The standard requires this function to return zero if the number of + // hardware cores could not be determined. + return 0; +#endif +} + + +//------------------------------------------------------------------------------ +// this_thread +//------------------------------------------------------------------------------ + +thread::id this_thread::get_id() +{ +#if defined(_TTHREAD_WIN32_) + return thread::id((unsigned long int) GetCurrentThreadId()); +#elif defined(_TTHREAD_POSIX_) + return _pthread_t_to_ID(pthread_self()); +#endif +} + +} diff --git a/src/utils/utils.cpp b/src/utils/utils.cpp new file mode 100644 index 00000000..24221e42 --- /dev/null +++ b/src/utils/utils.cpp @@ -0,0 +1,101 @@ +#include <cstring> +#include <cctype> + +#include "utils/utils.h" + +int check_argc(const char* input) { + int argc = 0; + int len = strlen(input); + + bool reading_arg = false; + char c; + for (int i = 0; i < len; ++i) { + c = input[i]; + if (isspace(c)) { + reading_arg = false; + } else if (not reading_arg) { + reading_arg = true; + ++argc; + } + } + return argc; +} + +int check_argc(const std::string input) { + char* cstr = new char [input.length() + 1]; + std::strcpy(cstr, input.c_str()); + + int argc = check_argc(cstr); + + delete[] cstr; + return argc; +} + +char** to_argv(const char* input) { + int argc = check_argc(input); + + char** argv; + char* arg; + char* in_copy; + + if ((argv = (char**)malloc(argc * sizeof(char*))) == NULL) { + throw BasicException("to_argv(): argv memory allocation error"); + } + + in_copy = (char*)malloc(strlen(input) + 1); + strcpy(in_copy, input); + for (int i = 0; i < argc; ++i) { + // Whitespaces handled by isspace() function + arg = strtok(in_copy, " \t\n\v\f\r"); + in_copy = NULL; + if (arg == NULL) { + throw BasicException("to_argv(): argc is not equal to real count"); + } + // NOTE: sizeof(char) is always 1 + if ((argv[i] = (char*)malloc(strlen(arg) + 1)) == NULL) { + throw BasicException("to_argv(): argv[i] memory allocation error"); + } + strcpy(argv[i], arg); + } + return argv; +} + +char** to_argv(const std::string input) { + char* cstr = new char [input.length() + 1]; + std::strcpy(cstr, input.c_str()); + + char** argv = to_argv(cstr); + + delete[] cstr; + return argv; +}
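+ +// Example usage (sketch; the option string below is illustrative only): +// int argc = check_argc("-c 1 -g 0.5"); +// char** argv = to_argv("-c 1 -g 0.5"); +// /* hand argc/argv to a command-line parser */ +// argv = free_argv(argc, argv); // returns NULL so the pointer can be cleared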
+ +char** free_argv(int argc, char** argv) { + for (int i = 0; i < argc; ++i) { + free(argv[i]); + } + free(argv); + return NULL; +} + +#ifdef RCPP_INTERFACE +int rcpp_c_rand() { + return int(Rcpp::runif(1)[0] * INT_MAX); +} +#endif + +int ed_c_rand() { +#ifdef RCPP_INTERFACE + return rcpp_c_rand(); +#else + return rand(); +#endif +} + +void ed_c_srand(unsigned int seed) { +#ifdef RCPP_INTERFACE + Rcpp::Environment::global_env()[".Random.seed"] = seed; +#else + srand(seed); +#endif +} diff --git a/tests/.Rhistory b/tests/.Rhistory new file mode 100644 index 00000000..e69de29b diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..f53f2840 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,10 @@ +Tests +===== + +This directory contains tests for both **C++** and **R** (C++ <-> R integration). + +See also: + +* [cpp/README.md](cpp/README.md) +* [testthat/README.md](testthat/README.md) + diff --git a/tests/cpp/.gitignore b/tests/cpp/.gitignore new file mode 100644 index 00000000..b9a20a53 --- /dev/null +++ b/tests/cpp/.gitignore @@ -0,0 +1,9 @@ +# Make results +run_tests +run_primary_tests.sh +run_secondary_tests.sh + +# Test temporary files +*.graphml +*.bin + diff --git a/tests/cpp/CMakeLists.txt b/tests/cpp/CMakeLists.txt new file mode 100644 index 00000000..220d5dae --- /dev/null +++ b/tests/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.0) +project(gmumr_tests) + +file (GLOB TEST_SOURCES + "${TESTS_DIR}/cec/*.cpp" + "${TESTS_DIR}/gng/*.cpp" + "${TESTS_DIR}/svm/*.cpp") + +set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${R_LD_FLAGS}") +set(CMAKE_CXX_FLAGS "${R_CXX_FLAGS} -std=c++0x") +add_definitions(-DTEST_DATA_PATH="${TEST_DATA_PATH}") +add_executable(gmumr_tests ${TEST_SOURCES}) +target_link_libraries(gmumr_tests gmum.r gtest gtest_main pthread) +INSTALL(TARGETS gmumr_tests DESTINATION ${TESTS_DIR}) diff --git a/tests/cpp/Makefile b/tests/cpp/Makefile new file mode 100644 index 00000000..56035b85 --- /dev/null +++ b/tests/cpp/Makefile @@ -0,0 +1,125 @@ +$(warning Remember to delete all compiled R package objects; you can run "make clean" to do it) + +R_HOME=$(shell R RHOME) +R_CPPFLAGS = $(shell $(R_HOME)/bin/R CMD config --cppflags) $(shell $(R_HOME)/bin/Rscript -e "RcppArmadillo:::CxxFlags()") $(shell $(R_HOME)/bin/Rscript -e "Rcpp:::CxxFlags()") + +include ../../src/Makevars + +# Explicit Makevars inheritances and overrides +CXX := $(CXX) +PREPROCESS = $(DEBUG) -DNRCPP_INTERFACE -DARMA_64BIT_WORD -DBOOST_DISABLE_ASSERTS +PKG_CPPFLAGS = $(GCC_STD) $(PREPROCESS) $(R_CPPFLAGS) $(INCLUDES) -pthread --std=c++0x +CPPFLAGS := $(PKG_CPPFLAGS) -O2 -g -s +LDLIBS := $(PKG_LIBS) -lpthread -lgtest -lgtest_main + +### +# Compilation parameters +### + +# Output executable +TEST_MAIN_BIN := run_tests + +# Root paths +ROOT_INCLUDE_PATH := ../../inst/include +ROOT_SRC_PATH := ../../src +ROOT_TEST_PATH := .
+ +# Branches that will be compiled +GMUMR_BRANCHES := cec gng svm utils +TEST_BRANCHES := cec gng svm utils + +# Additional cleaning paths +ADDITIONAL_CLEANING := *.graphml *.bin + +# Generate helper scripts for extra tests (run_TESTTYPE_tests.sh) +HELPER_SCRIPT_PATTERN_1 := run_ +HELPER_SCRIPT_PATTERN_2 := _tests.sh +PRIMARY_TESTS_NAME := primary +SECONDARY_TESTS_NAME := secondary + +# GTest name patterns that should be separated from primary tests +# See: `./run_tests -h | grep -- --gtest_filter` +SECONDARY_TESTS_TYPES := *NumericTest* + +### +# Externally compiled libraries +### +SVMLIGHT_INCLUDE_PATH := $(ROOT_INCLUDE_PATH)/svmlight +SVMLIGHT_SRC_PATH := $(ROOT_SRC_PATH)/svmlight +SVMLIGHT_OBJECT_FILENAMES := svm_common.o svm_hideo.o svm_learn.o + +### +# Generic variables +### + +# Compilation +SOURCES_WILDCARD := $(patsubst %, $(ROOT_SRC_PATH)/%/*.cpp, $(GMUMR_BRANCHES)) +TEST_SOURCES_WILDCARD := $(patsubst %, $(ROOT_TEST_PATH)/%/*.cpp, $(TEST_BRANCHES)) + +SOURCES := $(wildcard $(SOURCES_WILDCARD)) +TEST_SOURCES := $(wildcard $(TEST_SOURCES_WILDCARD)) + +OBJECTS := $(patsubst %.cpp, %.o, $(SOURCES)) +TEST_OBJECTS := $(patsubst %.cpp, %.o, $(TEST_SOURCES)) + +# Externally compiled libraries objects +SVMLIGHT_OBJECT_PATHS := $(patsubst %, $(SVMLIGHT_SRC_PATH)/%, $(SVMLIGHT_OBJECT_FILENAMES)) + +# Sum up compiled libraries objects +COMPILED_LIBRARIES_OBJECTS := $(SVMLIGHT_OBJECT_PATHS) + +# Helper scripts +PRIMARY_TESTS_SCRIPT := $(patsubst %, $(HELPER_SCRIPT_PATTERN_1)%$(HELPER_SCRIPT_PATTERN_2), $(PRIMARY_TESTS_NAME)) +SECONDARY_TESTS_SCRIPT := $(patsubst %, $(HELPER_SCRIPT_PATTERN_1)%$(HELPER_SCRIPT_PATTERN_2), $(SECONDARY_TESTS_NAME)) + +### +# Includes +### + +# Root directory (to allow explicit includes, e.g. `#include "svm/svm_basic.h"`) +ROOT_INCLUDE := -I $(ROOT_INCLUDE_PATH) + +# Generic branch includes +BRANCH_INCLUDES := $(patsubst %, -I $(ROOT_INCLUDE_PATH)/%, $(GMUMR_BRANCHES)) + +# Sum up compiled libraries includes +SVMLIGHT_INCLUDE := -I $(SVMLIGHT_INCLUDE_PATH) +COMPILED_LIBRARIES_INCLUDES := $(SVMLIGHT_INCLUDE) + +# Sum up all +INCLUDES := $(ROOT_INCLUDE) $(BRANCH_INCLUDES) $(COMPILED_LIBRARIES_INCLUDES) + +### +# Compilation rules +### + +CPPFLAGS := $(CPPFLAGS) $(INCLUDES) -DNRCPP_INTERFACE +CFLAGS := $(CPPFLAGS) + +all: $(TEST_MAIN_BIN) helper_scripts + @echo ... Done! + @echo See running options with ./run_tests -h + @echo or use helper scripts: $(PRIMARY_TESTS_SCRIPT) $(SECONDARY_TESTS_SCRIPT) + +clean: + rm $(OBJECTS) + rm -f $(TEST_MAIN_BIN) $(TEST_OBJECTS) $(COMPILED_LIBRARIES_OBJECTS)\ + $(PRIMARY_TESTS_SCRIPT) $(SECONDARY_TESTS_SCRIPT) $(ADDITIONAL_CLEANING) + +$(TEST_MAIN_BIN): $(TEST_OBJECTS) $(OBJECTS) $(COMPILED_LIBRARIES_OBJECTS) + @echo Linking all tests... + $(CXX) $^ -lgtest_main -o $@ $(LDLIBS) + +%.o: %.cpp %.hpp + @echo Compiling $@ ... + $(CXX) $(CPPFLAGS) -c $< -o $@ + +helper_scripts: + @echo Making helper scripts... + make helper_script filename="$(PRIMARY_TESTS_SCRIPT)" pattern=-$(SECONDARY_TESTS_TYPES) + make helper_script filename="$(SECONDARY_TESTS_SCRIPT)" pattern=$(SECONDARY_TESTS_TYPES) + +helper_script: + $(shell printf "#!/usr/bin/env bash\n./run_tests --gtest_filter=$(pattern)\n" > $(filename)) + $(shell chmod +x $(filename)) + diff --git a/tests/cpp/README.md b/tests/cpp/README.md new file mode 100644 index 00000000..f848ffe7 --- /dev/null +++ b/tests/cpp/README.md @@ -0,0 +1,101 @@ +C++ Tests +========= + +This directory contains C++ code tests.
+ +## Dependencies + +### GoogleTest + +To compile these tests you need to install **GoogleTest** >= 1.6.0. + +Ensure that the GTest library is installed: + +`ldconfig -p | grep gtest` + +If there is no output, you must install the gtest package or compile gtest into a +shared library. + +#### Arch Linux + +`pacman -Sy gtest` + +#### Debian / Ubuntu + +Download the latest version of GTest from: + +https://code.google.com/p/googletest/downloads/list + +For example: + +``` +$ wget https://googletest.googlecode.com/files/gtest-1.7.0.zip +``` + +Unzip, configure and make gtest: + +``` +$ unzip gtest-1.7.0.zip +$ cd gtest-1.7.0 +$ ./configure +$ make +``` + +Copy includes and shared objects into system directories (of Debian / Ubuntu): + +``` +$ sudo cp -a include/gtest /usr/include +$ sudo cp -a lib/.libs/* /usr/lib/ +``` + +Update ldconfig by simply running `sudo ldconfig`. + +### GMUM.R dependencies + +The C++ **Armadillo** library and its dependencies (**LAPACK**, **BLAS**) need to be installed. + +## Build + +To build tests `cd` into this directory (`tests/cpp`) and type `make`. + +## Run tests + +Run the compiled executable: `./run_tests` + +Big data tests require external input files; run `big_data_downloader.sh` +to download them. + +To run tests individually, use the helper `./*.sh` scripts or refer to +`./run_tests -h` from GTest. + +Helper scripts: + +* `run_primary_tests.sh` runs primary coverage tests used in CI +* `run_secondary_tests.sh` runs long-running tests, e.g. for validating +the results of the computations + +## Adding tests + +Add new tests as `*.cpp` files in subdirectories. You can create a new directory +for your own branch/type of tests. To compile a new directory, add its name to +the Makefile `TEST_BRANCHES` variable. + +## Duration of the tests + +Please consider adding your test to **secondary tests** if it usually +takes more than a minute. You can do that by giving it a name containing the +`NumericTest` string, or by defining your own type of test and appending its pattern to +the `SECONDARY_TESTS_TYPES` variable (list) in the Makefile. This will include it in the +secondary tests during compilation. + +## Disabling tests + +To disable a single test case, prefix its name with `DISABLED_` (the GTest +convention); see the example below. + +To disable a whole file, rename its extension, for example from `*.cpp` to `*.xcpp`. + +We disable tests when, for example, they are no longer compatible with the +current architecture or otherwise block continuous integration. Disabled tests +can be fixed later, while continuous integration keeps passing.
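+ +For example, a hypothetical long-running case can be skipped in place (the suite and test names here are illustrative only): + +``` +// GTest skips this test at runtime because of the DISABLED_ prefix: +TEST(CEC_NumericTest, DISABLED_HugeDatasetEnergy) { + // ... +} +```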
+ diff --git a/tests/cpp/cec/big_data_downloader.sh b/tests/cpp/cec/big_data_downloader.sh new file mode 100755 index 00000000..82fa6cf4 --- /dev/null +++ b/tests/cpp/cec/big_data_downloader.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +URL="http://data.gmum.net/gmum_r/cec/big_tests" +DATA_DIR="../../../inst/dev/datasets/cec" +DIRS=`cat test_data_dirs_list` +FILES=`cat test_data_list` + +# mkdir if not exist +for d in $DIRS +do + mkdir -p $DATA_DIR/$d +done + +# Download data +for f in $FILES +do + wget $URL/$f -O $DATA_DIR/$f +done diff --git a/tests/cpp/cec/cluster_reader.cpp b/tests/cpp/cec/cluster_reader.cpp new file mode 100644 index 00000000..023a621c --- /dev/null +++ b/tests/cpp/cec/cluster_reader.cpp @@ -0,0 +1,163 @@ +#include "cluster_reader.hpp" +#include <algorithm> + +ClusterReader::ClusterReader(const char * name, unsigned int dim) { + m_energy = -1; + m_folder_name = std::string(name); + m_dim = dim; +} + +std::string ClusterReader::prefix() { + //TODO: fix path to ../../inst +#ifdef TEST_DATA_PATH + return std::string(TEST_DATA_PATH) + "/cec/" + m_folder_name + "/"; +#else + return std::string("../../inst/dev/datasets/cec/") + m_folder_name + "/"; +#endif +} + +std::string ClusterReader::cluster_path() { + return prefix() + "cluster.txt"; +} + +std::string ClusterReader::input_path() { + return prefix() + "input.txt"; +} + +std::string ClusterReader::energy_path() { + return prefix() + "energy.txt"; +} + +std::string ClusterReader::dimension_path() { + return prefix() + "dimension.txt"; +} + +void ClusterReader::normalize_clustering(std::vector<int> &clusters) +{ + int min = *(std::min_element(clusters.begin(), clusters.end())); + for (std::vector<int>::iterator it = clusters.begin(); + it != clusters.end(); ++it) { + *it -= min; + } +} + +void ClusterReader::read_points() { + // std::cout << " read points " << std::endl; + if(m_dim == 0) + { + read_dimension(); + } + + std::ifstream file(input_path().c_str()); + if (file.is_open()) { + std::string line; + while (file.good()) { + std::getline(file, line); + std::stringstream ss(line); + std::vector<double> current_vector; + if (line.size() < m_dim) + continue; + for (unsigned int i = 0; i < m_dim; ++i) { + double x; + ss >> x; + current_vector.push_back(x); + } + m_points.push_back(current_vector); + } + file.close(); + } else { + std::cerr << "Failed to open " << input_path() << std::endl; + throw std::runtime_error(input_path() + "Failed to open "); + } + // std::cout << "Finish reading opening. Read " << m_points.size() << std::endl; +} + +void ClusterReader::read_clustering() { + // std::cout << "reading clusters " << std::endl; + std::ifstream file(cluster_path().c_str()); + std::vector<int> clusters; + if (file.is_open()) { + std::string line; + while (file.good()) { + std::getline(file, line); + std::stringstream ss(line); + if (line.size() == 0) + continue; + int x; + ss >> x; + clusters.push_back(x); + } + file.close(); + normalize_clustering(clusters); + for(int i = 0; i < clusters.size(); ++i) + { + m_clustering.push_back(clusters[i]); + } + } else { + std::cerr << "Failed to open " << cluster_path() << std::endl; + throw std::runtime_error(cluster_path() + "Failed to open "); + } + // std::cout << "Finished reading clusters. Read " << m_clustering.size() << std::endl;
Read " << m_clustering.size() << std::endl; +} + +void ClusterReader::read_energy() { + std::ifstream file(energy_path().c_str()); + if (file.is_open()) { + file >> m_energy; + file.close(); + } else { + std::cerr << "Failed to open " << energy_path() << std::endl; + throw std::runtime_error(energy_path() + "Failed to open "); + } +} + +void ClusterReader::read_dimension() { + std::ifstream file(dimension_path().c_str()); + if (file.is_open()) { + file >> m_dim; + file.close(); + } else { + std::cerr << "Failed to open " << dimension_path() << std::endl; + throw std::runtime_error(dimension_path() + "Failed to open "); + } +} +void ClusterReader::get_points(std::vector > & out) { + if (m_points.size() == 0) + read_points(); + for (std::vector >::iterator it = m_points.begin(); + it != m_points.end(); ++it) + out.push_back(*it); + +} + +void ClusterReader::get_clustering(std::vector & out) { + if (m_clustering.size() == 0) + read_clustering(); + for (std::vector::iterator it = m_clustering.begin(); + it != m_clustering.end(); ++it) + out.push_back(*it); +} + +double ClusterReader::get_energy() { + if (m_energy == -1) + read_energy(); + return m_energy; +} + +double ClusterReader::get_dimension() { + return m_dim; +} + +arma::mat ClusterReader::get_points_in_matrix() { + if (m_points.size() == 0) + read_points(); + unsigned int n = m_points.size(); + arma::mat result(n, m_dim); + for (unsigned int i = 0; i < n; ++i) { + for (unsigned int j = 0; j < m_dim; ++j) { + result(i, j) = m_points[i][j]; + } + } + return result; +} + diff --git a/tests/cpp/cec/cluster_reader.hpp b/tests/cpp/cec/cluster_reader.hpp new file mode 100644 index 00000000..aa701d92 --- /dev/null +++ b/tests/cpp/cec/cluster_reader.hpp @@ -0,0 +1,40 @@ +#ifndef CLUSTERREADER_HPP +#define CLUSTERREADER_HPP + +#include +#include +#include +#include +#include +#include + +class ClusterReader { +protected: + std::string prefix(); + std::string input_path(); + std::string cluster_path(); + std::string energy_path(); + std::string dimension_path(); + void normalize_clustering(std::vector& clusters); + + std::string m_folder_name; + std::vector > m_points; + std::vector m_clustering; + double m_energy; + unsigned int m_dim; +public: + ClusterReader(const char * name, unsigned int dim = 0); + void read_points(); + void read_clustering(); + void read_dimension(); + + void get_points(std::vector > & out); + void get_clustering(std::vector & out); + void read_energy(); + double get_energy(); + double get_dimension(); + arma::mat get_points_in_matrix(); + +}; + +#endif diff --git a/tests/cpp/cec/cluster_trace_only_test.cpp b/tests/cpp/cec/cluster_trace_only_test.cpp new file mode 100644 index 00000000..f8930f91 --- /dev/null +++ b/tests/cpp/cec/cluster_trace_only_test.cpp @@ -0,0 +1,109 @@ +#include "gtest/gtest.h" +#include "cluster.hpp" +#include +#include +#include +#include +#include +using namespace gmum; + +TEST(CEC_TraceOnly,AddPoint) { + //arma_rng::set_seed(0); + int n = 10000; + int dim = 2; + int beg = dim+1; + double acceptable_difference = 1e-6; + + std::vector fits; + unsigned id = 1; + arma::mat data(n,dim); + data.randu(); + for (int i = 0; i < n; ++i) { + fits.push_back(id); + } + + arma::mat init_matrix(beg,dim); + for(size_t k = 0; k < beg; ++k) { + for (int j = 0; j < dim; ++j) { + init_matrix(k,j) = data(k,j); + } + } + + boost::scoped_ptr m(new ClusterSpherical(id,fits,init_matrix)); + // Dodajemy element o indeksie i + for (int i = beg; i < n-1; ++i) { + + arma::mat tmp_matrix(i+1,dim); + for (int j = 0; j <=i; ++j) 
+ tmp_matrix.row(j) = data.row(j); + } + arma::mat covariance = cov(tmp_matrix,1); + arma::mat real_m = mean(tmp_matrix); + + arma::rowvec point(data.row(i)); + m->add_point(point); + ClusterOnlyTrace * upref = dynamic_cast<ClusterOnlyTrace *>(m.get()); + ClusterStandard tmp(id,fits,tmp_matrix); + arma::rowvec mean_online_difference = upref->get_mean() - real_m; + float trace_diff = upref->get_cov_mat_trace() - arma::trace(covariance); + // float relative_error = std::abs(trace_diff/arma::trace(fixed_covariance)); + + EXPECT_EQ(m->size(),tmp.size()); + EXPECT_LT(std::abs(trace_diff),acceptable_difference); + // std::cout << i << " : " << relative_error << std::endl; + for (int j = 0; j < dim; ++j) { + EXPECT_LT(std::abs(mean_online_difference(j)),acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference; + } + } +} + +TEST(CEC_TraceOnly,removePoint) { + //arma_rng::set_seed(0); + int n = 100; + int dim = 2; + int end = dim+1; + double acceptable_difference = 1e-6; + + std::vector<unsigned int> fits; + unsigned id = 1; + arma::mat data(n,dim); + data.randu(); + for (int i = 0; i < n; ++i) { + fits.push_back(id); + } + + arma::mat init_matrix(n,dim); + for(size_t k = 0; k < n; ++k) { + for (int j = 0; j < dim; ++j) { + init_matrix(k,j) = data(k,j); + } + } + + boost::scoped_ptr<ClusterSpherical> m(new ClusterSpherical(id,fits,init_matrix)); + // Remove the element with index i + for (int i = n-1; i > end; --i) { + + arma::mat tmp_matrix(i,dim); + for (int j = 0; j < i; ++j) { + tmp_matrix.row(j) = data.row(j); + } + arma::mat covariance = cov(tmp_matrix,1); + arma::mat real_m = mean(tmp_matrix); + + arma::rowvec point(data.row(i)); + m->remove_point(point); + ClusterStandard tmp(id,fits,tmp_matrix); + + ClusterOnlyTrace * upref = dynamic_cast<ClusterOnlyTrace *>(m.get()); + arma::rowvec mean_online_difference = upref->get_mean() - real_m; + float trace_diff = upref->get_cov_mat_trace() - arma::trace(covariance); + // float relative_error = std::abs(trace_diff/arma::trace(fixed_covariance)); + // std::cout << i << " " << relative_error << std::endl; + EXPECT_EQ(m->size(),tmp.size()); + EXPECT_LT(std::abs(trace_diff), acceptable_difference); + for (int j = 0; j < dim; ++j) { + EXPECT_LT(std::abs(mean_online_difference(j)),acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference; + } + } +} diff --git a/tests/cpp/cec/clustering_comparator.cpp b/tests/cpp/cec/clustering_comparator.cpp new file mode 100644 index 00000000..067ef4bb --- /dev/null +++ b/tests/cpp/cec/clustering_comparator.cpp @@ -0,0 +1,53 @@ +#include "clustering_comparator.hpp" + +double ClusteringComparator::correct(std::vector<unsigned int> f, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits){ + assert(real_fits.size() == my_fits.size()); + size_t nelements = real_fits.size(); + + int ncorrect = 0; + for (unsigned int i = 0; i < nelements; ++i) + { + assert(my_fits[i] < f.size()); + assert(real_fits[i] < f.size()); + + if(real_fits[i] == f[my_fits[i]]) + { + ++ncorrect; + } + } + // std::cout << "numberOfCorrect = " << numberOfCorrect << std::endl; + + return 1.0 * ncorrect / nelements; +} + +double ClusteringComparator::evaluate_clustering(unsigned int nclusters, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits) +{ + std::vector<unsigned int> f = compute_permutation(nclusters, real_fits, my_fits); + assert(f.size() == nclusters); + return correct(f, real_fits, my_fits); +} + + +std::vector<unsigned int> BestPermutationComparator::compute_permutation(unsigned int nclusters, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits) +{ + std::vector<unsigned int> current; + for (unsigned int i = 0; i < nclusters; ++i) + current.push_back(i); + std::vector<unsigned int> best; + double best_value = 0.0; + do { + double current_value = correct(current, real_fits, my_fits);
+ // std::cout << "Current value " << current_value << std::endl; + if (best_value < current_value) { + best = current; + best_value = current_value; + } + } while (std::next_permutation(current.begin(), current.end())); + return best; +} diff --git a/tests/cpp/cec/clustering_comparator.hpp b/tests/cpp/cec/clustering_comparator.hpp new file mode 100644 index 00000000..de2e1c7e --- /dev/null +++ b/tests/cpp/cec/clustering_comparator.hpp @@ -0,0 +1,30 @@ +#ifndef CLUSTERINGCOMPARATOR_HPP +#define CLUSTERINGCOMPARATOR_HPP + +#include <algorithm> +#include <vector> + +class ClusteringComparator { +public: + virtual std::vector<unsigned int> compute_permutation( + unsigned int nclusters, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits) = 0; + + double correct(std::vector<unsigned int> f, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits); + + double evaluate_clustering(unsigned int nclusters, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits); +}; + +class BestPermutationComparator: public ClusteringComparator { +public: + virtual std::vector<unsigned int> compute_permutation( + unsigned int nclusters, + std::vector<unsigned int> & real_fits, + std::vector<unsigned int> & my_fits); +}; +#endif diff --git a/tests/cpp/cec/extra/README.md b/tests/cpp/cec/extra/README.md new file mode 100644 index 00000000..d24c2d15 --- /dev/null +++ b/tests/cpp/cec/extra/README.md @@ -0,0 +1,9 @@ +CEC performance testing +============== + +These scripts compare the performance of gmum CEC with CRAN CEC. +Configuration is in the common.R file. +To make this work you have to set the correct CRAN and gmum paths (paths to the root directories). +The basic configuration generates points in the range from 100 to 5000 with step 50 (100, 150, ..., 5000). +To run the scripts execute ./run.sh; it should generate a data folder and two plots, iters_plot.jpg and times_plot.jpg.
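+ +The sweep itself is driven by the run_tests entry point in performance_tests.R; the call shipped with these scripts (its parameters may differ from the defaults described above) is: + +``` +run_tests(npoints_start=200, ndatasets=10, npoints_step=500, averaging=5, + gmum_cec_function=perf_gmum_cec, cran_cec_function=perf_cran_cec) +```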
+To clean generated results run ./clean.sh diff --git a/tests/cpp/cec/extra/performance_tests/clean.sh b/tests/cpp/cec/extra/performance_tests/clean.sh new file mode 100755 index 00000000..6c24a78d --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/clean.sh @@ -0,0 +1,2 @@ +#!/bin/bash +rm -Rf plots data *.jpg *.log diff --git a/tests/cpp/cec/extra/performance_tests/common.R b/tests/cpp/cec/extra/performance_tests/common.R new file mode 100644 index 00000000..8eeee58c --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/common.R @@ -0,0 +1,88 @@ +library(gmum.r) +library(CEC) +source('../../../../../R/R_scripts/energy.R') +method_types = list(gmum=c('standard', 'spherical', 'diagonal'), cran=c('all', 'spherical', 'diagonal')) +entropy_func_map = list(standard_entropy, sphere_entropy, diagonal_entropy) +nmethod_types = length(method_types$gmum) + +load_dataset <- function(data_path) { + points_file = file.path(data_path, 'input.txt') + clusters_file = file.path(data_path, 'cluster.txt') + + if(!file.exists(points_file)) { + stop(points_file, ": File does not exist") + } + + if(!file.exists(clusters_file)) { + stop(clusters_file, ": File does not exist") + } + + dataset <- as.matrix(read.table(file=points_file, colClasses='numeric')) + clusters <- normalize_clustering(as.vector(as.matrix(read.table(file=clusters_file)))) + k <- length(unique(clusters)) + name <- basename(data_path) + return(list(name=name, k=k, clustering=clusters, dataset=dataset)) +} + +normalize_clustering <- function(clustering) { + return( (clustering - min(clustering)) + 1 ) +} + +gmum_cec <- function(nclusters, nstart, points, init_type, method_type, max_iterations, eps, output_plot_path = NULL) { + t = as.numeric(system.time(c <- CEC(k=nclusters, control.nstart=nstart, x=points, method.init=init_type, method.type=method_type, control.itmax=max_iterations, control.eps=eps, log.ncluster=TRUE))[3]) + if( !is.null(output_plot_path) ) { + jpeg(output_plot_path) + plot(c) + dev.off() + } + + time_ = t + iters_ = c$iterations + energy_ = c$energy + clustering_ = normalize_clustering(c$clustering) + centers_ = c$centers + final_nclusters_ = tail(c$logNumberOfClusters, n=1) + + return (list( + time=time_, + iters=iters_, + energy=energy_, + clustering=clustering_, + centers=centers_, + final_nclusters=final_nclusters_)) +} + +cran_cec <- function(nclusters, nstart, points, init_type, method_type, max_iterations, eps, output_plot_path = NULL) { + t = as.numeric(system.time(c <- cec(centers=nclusters, nstart=nstart, x=points, centers.init=init_type, type=method_type, iter.max=max_iterations, card.min=eps))[3]) + + if( !is.null(output_plot_path) ) { + jpeg(output_plot_path) + plot(c) + dev.off() + } + + return (list( + time=t, + iters=c$iterations, + energy=c$final.cost, + clustering=normalize_clustering(c$cluster), + centers=c$centers, + final_nclusters=c$final.nclusters)) +} + +append_result <- function(method_type, points, times, results, cec_function) { + t = 0 + it = 0 + e = 0 + for(i in 1:times) + { + c = cec_function(method_type, points) + t = t + c$time + it = it + c$iters + e = e + c$energy + } + results$time = c(results$time, t) + results$iters = c(results$iters, it) + results$energy = c(results$energy, e) + return (results) +} diff --git a/tests/cpp/cec/extra/performance_tests/external_dataset_tests.R b/tests/cpp/cec/extra/performance_tests/external_dataset_tests.R new file mode 100644 index 00000000..e3822a33 --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/external_dataset_tests.R @@ -0,0 +1,143 
@@ +source('common.R') +library(phyclust) +library(nlme) + +data_path <- '../../../../../inst/data_sets/cec' +plots_path <- file.path('.', 'plots') +gmum_plot_path <- file.path(plots_path, 'gmum') +cran_plot_path <- file.path(plots_path, 'cran') +dir.create(gmum_plot_path, showWarnings = FALSE, recursive = TRUE) +dir.create(cran_plot_path, showWarnings = FALSE, recursive = TRUE) +test_data = list() +default_parameters = list(nstart=1000, init_type='kmeans++', max_iterations=200) + +#test_data[['DimSet']] = list( +# DimSets_32 = load_dataset(data_path = file.path(data_path, "DimSets_32")), +# DimSets_64 = load_dataset(data_path = file.path(data_path, "DimSets_64")), +# DimSets_128 = load_dataset(data_path = file.path(data_path, "DimSets_128")), +# DimSets_256 = load_dataset(data_path = file.path(data_path, "DimSets_256")), +# DimSets_512 = load_dataset(data_path = file.path(data_path, "DimSets_512")) +# DimSets_1024 = load_dataset(data_path = file.path(data_path, "DimSets_1024")) +#) + +test_data[['UCI']] = list( + iris = load_dataset(data_path = file.path(data_path, "UCI","iris")), + glass = load_dataset(data_path = file.path(data_path, "UCI", "glass")), + vowel = load_dataset(data_path = file.path(data_path, "UCI", "vowel")), + wine = load_dataset(data_path = file.path(data_path, "UCI", "wine")), + pendigits = load_dataset(data_path = file.path(data_path, "UCI", "pendigits")), + poker = load_dataset(data_path = file.path(data_path, "UCI", "poker")), + connect_4 = load_dataset(data_path = file.path(data_path, "UCI", "connect_4")) + #covtype = load_dataset(data_path = file.path(data_path, "UCI", "covtype")) +) + +gmum_cec_uci <- function(method_type, points, nclusters, output_plot_path = NULL) { + return (gmum_cec(nstart = default_parameters$nstart, init_type = default_parameters$init_type, max_iterations = default_parameters$max_iterations, method_type = method_type, points = points, nclusters = nclusters, eps = 0.01, output_plot_path)) +} + +cran_cec_uci <- function(method_type, points, nclusters, output_plot_path = NULL) { + return (cran_cec(nstart = default_parameters$nstart, init_type = default_parameters$init_type, max_iterations = default_parameters$max_iterations, method_type = method_type, points = points, nclusters = nclusters, eps = "1%", output_plot_path)) +} + +print('default parameters') +print(default_parameters) + +for(name in names(test_data)) { + for(i in 1:length(test_data[[name]])) { + item = test_data[[name]][[i]] + dataset = item$dataset + dataset_dim = dim(dataset) + + cat(paste(name, '/', item$name, '(k=',item$k,', rows=', dataset_dim[1], ', cols=', dataset_dim[2], ')\n')) + for(j in 1:nmethod_types) { + plot_file_name <- paste(c(name, item$name, method_types$gmum[j], '.jpg'), collapse='_') + + gmum <- tryCatch({ + gmum_result <- gmum_cec_uci(method_type = method_types$gmum[j], points = dataset, nclusters = item$k) + gmum_clustering_df <- data.frame(gmum_result$clustering, item$clustering) + colnames(gmum_clustering_df) <- c('gmum', 'correct') + gmum_rand_index <- RRand(prcl = gmum_result$clustering, trcl = item$clustering)$Rand + gmum_energy = cec_energy(dataset = dataset, clustering = gmum_result$clustering, entropy_func = entropy_func_map[[j]]) + if(gmum_rand_index < 0) { + print('rand index < 0!') + print('gmum clustering') + print(gmum_result$clustering) + print('correct clustering') + print(item$clustering) + } + gmum_bic <- BIC(lm(gmum ~ correct, data=gmum_clustering_df)) + list( + energy = gmum_result$energy, + gmum_energy = gmum_energy, + iters = 
gmum_result$iters, + time = gmum_result$time, + final_nclusters = gmum_result$final_nclusters, + rand_index = gmum_rand_index, + bic = gmum_bic + ) + }, error = function(e) { + cat(paste('gmum: error occurred: ', e)) + return(list( + energy = 'failed', + iters = 'failed', + time = 'failed', + final_nclusters = 'failed', + rand_index = 'failed', + bic = 'failed')) + }) + + cran <- tryCatch({ + cran_result <- cran_cec_uci(method_type = method_types$cran[j], points = dataset, nclusters = item$k) + cran_clustering_df <- data.frame(cran_result$clustering, item$clustering) + colnames(cran_clustering_df) <- c('cran', 'correct') + gmum_energy = cec_energy(dataset = dataset, clustering = cran_result$clustering, entropy_func = entropy_func_map[[j]]) + cran_rand_index <- RRand(prcl = cran_result$clustering, trcl = item$clustering)$Rand + if(cran_rand_index < 0) { + print('rand index < 0!') + print('cran clustering') + print(cran_result$clustering) + print('correct clustering') + print(item$clustering) + } + cran_bic <- BIC(lm(cran ~ correct, data=cran_clustering_df)) + list( + energy = cran_result$energy, + gmum_energy = gmum_energy, + iters = cran_result$iters, + time = cran_result$time, + final_nclusters = cran_result$final_nclusters, + rand_index = cran_rand_index, + bic = cran_bic + ) + }, error = function(e) { + cat(paste('cran: error occurred: ', e)) + return(list( + energy = 'failed', + iters = 'failed', + time = 'failed', + final_nclusters = 'failed', + rand_index = 'failed', + bic = 'failed')) + }) + + table_data <- matrix( + c(gmum$energy, cran$energy, + gmum$gmum_energy, cran$gmum_energy, + gmum$iters, cran$iters, + gmum$time, cran$time, + gmum$final_nclusters, cran$final_nclusters, + gmum$rand_index, cran$rand_index, + gmum$bic, cran$bic), + ncol=2, + byrow=TRUE + ) + + cat('method type: ', method_types$gmum[j], '\n') + table_data <- as.table(table_data) + colnames(table_data) <- c('gmum', 'cran') + rownames(table_data) <- c('energy','R energy func', 'iters', 'time', 'clusters', 'rand index', 'BIC') + print(table_data) + cat('\n') + } + } +} diff --git a/tests/cpp/cec/extra/performance_tests/performance_tests.R b/tests/cpp/cec/extra/performance_tests/performance_tests.R new file mode 100644 index 00000000..b8da4a3b --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/performance_tests.R @@ -0,0 +1,73 @@ +source('common.R') +source('plot_generator.R') +source('../../../../../R/R_scripts/mouseGaussGenerator.R') + +run_tests <- function(npoints_start, ndatasets, npoints_step, averaging, gmum_cec_function, cran_cec_function) { + results = list( + gmum=list( + standard=list(time=c(), iters=c(), energy=c()), + sphere=list(time=c(), iters=c(), energy=c()), + diagonal=list(time=c(), iters=c(), energy=c()) + ), + cran=list( + all=list(time=c(), iters=c(), energy=c()), + spherical=list(time=c(), iters=c(), energy=c()), + diagonal=list(time=c(), iters=c(), energy=c()) + ) + ) + x = seq(npoints_start, npoints_start + ndatasets * npoints_step, by=npoints_step) + + for(i in x) { + #generate dataset + size_of_data <- c(i,i,i) + ear_distance <- 2 + dataset <- mouseGaussGenerator(size_of_data, ear_distance)$data + + for(j in 1:nmethod_types) { + gmum_method_type_name = method_types$gmum[j] + cran_method_type_name = method_types$cran[j] + + results$gmum[[gmum_method_type_name]] = append_result(gmum_method_type_name, dataset, averaging, results$gmum[[gmum_method_type_name]], gmum_cec_function) + results$cran[[cran_method_type_name]] = append_result(cran_method_type_name, dataset, averaging,
results$cran[[cran_method_type_name]], cran_cec_function) + } + } + + for(j in 1:nmethod_types) { + method_type_name = method_types$gmum[j] + x_label_ = "Number of points" + y1_label_ = "gmum" + y2_label_ = "cran" + + plot_name_ = paste(c(method_type_name,' times plot'), collapse='') + y1_ = results$gmum[[method_types$gmum[j]]]$time + y2_ = results$cran[[method_types$cran[j]]]$time + y_label_ = "Time" + file_name_ = paste(c(method_type_name,'_times_plot.jpg'), collapse='') + generate_plot(plot_name_, x, y1_, y2_, x_label_, y_label_, y1_label_, y2_label_, file_name_) + + plot_name_ = paste(c(method_type_name,' energy plot'), collapse='') + y1_ = results$gmum[[method_types$gmum[j]]]$energy + y2_ = results$cran[[method_types$cran[j]]]$energy + y_label_ = "Energy" + file_name_ = paste(c(method_type_name,'_energy_plot.jpg'), collapse='') + generate_plot(plot_name_, x, y1_, y2_, x_label_, y_label_, y1_label_, y2_label_, file_name_) + + plot_name_ = paste(c(method_type_name,' time/iter plot'), collapse='') + y1_ = results$gmum[[method_types$gmum[j]]]$time / results$gmum[[method_types$gmum[j]]]$iters + y2_ = results$cran[[method_types$cran[j]]]$time / results$cran[[method_types$cran[j]]]$iters + x_label_ = "Number of points" + y_label_ = "Time / iter" + file_name_ = paste(c(method_type_name,'_time_iter_plot.jpg'), collapse='') + generate_plot(plot_name_, x, y1_, y2_, x_label_, y_label_, y1_label_, y2_label_, file_name_) + } +} + +perf_gmum_cec <- function(method_type, points) { + return (gmum_cec(nclusters = 3, nstart = 1, points = points, init_type = 'random', method_type = method_type, max_iterations = 200, eps = 0.05)) +} + +perf_cran_cec <- function(method_type, points) { + return (cran_cec(nclusters = 3, nstart = 1, points = points, init_type = 'random', method_type = method_type, max_iterations = 200, eps = "5%")) +} + +run_tests(npoints_start=200, ndatasets=10, npoints_step=500, averaging=5, gmum_cec_function=perf_gmum_cec, cran_cec_function=perf_cran_cec) diff --git a/tests/cpp/cec/extra/performance_tests/plot_generator.R b/tests/cpp/cec/extra/performance_tests/plot_generator.R new file mode 100644 index 00000000..0e1cc315 --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/plot_generator.R @@ -0,0 +1,18 @@ +generate_plot <- function(plot_name, x, y1, y2, x_label, y_label, y1_label, y2_label, file_name) { + print(file_name) + min_x = min(x) + max_x = max(x) + min_y = min(min(y1), min(y2)) + max_y = max(max(y1), max(y2)) + + jpeg(file_name) + plot( c(min_x, max_x), c(min_y, max_y), type="n", xlab=x_label, ylab=y_label ) + title(main=plot_name) + lines(x, y1, col="green", lwd=2.5) + lines(x, y2, col="red", lwd=2.5) + par(xpd=TRUE) + legend("bottom", legend = c(y1_label, y2_label), text.width = max(sapply(text, strwidth)), + col=c("green", "red"), lwd=5, cex=1, horiz = TRUE) + par(xpd=FALSE) + dev.off() +} diff --git a/tests/cpp/cec/extra/performance_tests/run.sh b/tests/cpp/cec/extra/performance_tests/run.sh new file mode 100755 index 00000000..db7e4081 --- /dev/null +++ b/tests/cpp/cec/extra/performance_tests/run.sh @@ -0,0 +1,4 @@ +#!/bin/bash +mkdir data +R < performance_tests.R --vanilla +R < external_dataset_tests.R --vanilla diff --git a/tests/cpp/cec/extra/valgrind/test_demo.R b/tests/cpp/cec/extra/valgrind/test_demo.R new file mode 100644 index 00000000..c34c8de4 --- /dev/null +++ b/tests/cpp/cec/extra/valgrind/test_demo.R @@ -0,0 +1,4 @@ +gmum_cec_path = '../../../../../' +library(devtools) +load_all(gmum_cec_path) +demo(cec) diff --git 
a/tests/cpp/cec/extra/valgrind/valgrind.sh b/tests/cpp/cec/extra/valgrind/valgrind.sh new file mode 100755 index 00000000..8d608cf4 --- /dev/null +++ b/tests/cpp/cec/extra/valgrind/valgrind.sh @@ -0,0 +1 @@ +R -d "valgrind --tool=memcheck --leak-check=full" --vanilla < test_demo.R > log_demo.txt 2>&1 diff --git a/tests/cpp/cec/not_working/big_data_tests.xcpp b/tests/cpp/cec/not_working/big_data_tests.xcpp new file mode 100644 index 00000000..05b8cac1 --- /dev/null +++ b/tests/cpp/cec/not_working/big_data_tests.xcpp @@ -0,0 +1,114 @@ +#include "gtest/gtest.h" +#include "cluster_reader.hpp" +#include "cec.hpp" +#include "hartigan.hpp" +#include "randomAssignment.hpp" +#include "cecConfiguration.hpp" +#include <algorithm> +#include <boost/shared_ptr.hpp> + +using namespace gmum; + +#define SHOW(x) std::cout << #x << " = " << x << std::endl +int times = 10; +void run(const char * str, int numberOfClusters, int times = 1) { + std::cout.precision(41); + boost::shared_ptr < std::vector<unsigned int> + > clustering(new std::vector<unsigned int>()); + ClusterReader clusterReader(str, 4); + clusterReader.getClustering(*clustering); + + int min = *(std::min_element(clustering->begin(), clustering->end())); + for (std::vector<unsigned int>::iterator it = clustering->begin(); + it != clustering->end(); ++it) + *it -= min; + boost::shared_ptr < arma::mat + > points(new arma::mat(clusterReader.getPointsInMatrix())); + Params params; + params.killThreshold = 0.0001; + params.nrOfClusters = numberOfClusters; + params.dataset = points; + params.clusterType = kstandard; + params.nstart = 10; + + for (int i = 0; i < times; ++i) { + cecConfiguration *conf = new cecConfiguration(); + conf->setParams(params); + conf->setMethodInit("random"); + cecModel cec(conf); + cec.loop(); + std::cout << "Energy " << cec.entropy() << std::endl; + delete conf; + } + //TODO + ASSERT_TRUE(true); +} + +void runSpherical(const char * str, int numberOfClusters, int times = 1) { + std::cout.precision(41); + boost::shared_ptr < std::vector<unsigned int> + > clustering(new std::vector<unsigned int>()); + ClusterReader clusterReader(str, 4); + clusterReader.getClustering(*clustering); + + int min = *(std::min_element(clustering->begin(), clustering->end())); + for (std::vector<unsigned int>::iterator it = clustering->begin(); + it != clustering->end(); ++it) + *it -= min; + boost::shared_ptr < arma::mat + > points(new arma::mat(clusterReader.getPointsInMatrix())); + Params params; + params.killThreshold = 0.0001; + params.nrOfClusters = numberOfClusters; + params.dataset = points; + params.clusterType = kstandard; + params.nstart = 10; + +// std::vector<ClusterType> types; +// std::vector<double> radius; +// std::vector<arma::mat> covMatrices; +// for (int i = 0 ; i < numberOfClusters ; ++i){ +// types.push_back(sphere); +// } + for (int i = 0; i < times; ++i) { + cecConfiguration *conf = new cecConfiguration(); + conf->setParams(params); + conf->setMethodInit("random"); + cecModel cec(conf); + cec.loop(); + std::cout << "Energy " << cec.entropy() << std::endl; + delete conf; + } +} + +TEST(BigData, Normal_1) { + run("bigData_1", 3, times); +} + +TEST(BigData, Normal_2) { + run("bigData_2", 3, times); +} + +TEST(BigData, Normal_3) { + run("bigData_3", 3, times); +} + +TEST(BigData, Normal_4) { + run("bigData_4", 3, times); +} + +TEST(BigData, Spherical_1) { + runSpherical("bigData_1", 3, times); +} + +TEST(BigData, Spherical_2) { + runSpherical("bigData_2", 3, times); +} + +TEST(BigData, Spherical_3) { + runSpherical("bigData_3", 3, times); +} + +TEST(BigData, Spherical_4) { + runSpherical("bigData_4", 3, times); +} diff --git a/tests/cpp/cec/not_working/testReader.xcpp
diff --git a/tests/cpp/cec/not_working/testReader.xcpp b/tests/cpp/cec/not_working/testReader.xcpp
new file mode 100644
index 00000000..c31712a0
--- /dev/null
+++ b/tests/cpp/cec/not_working/testReader.xcpp
@@ -0,0 +1,14 @@
+#include "gtest/gtest.h"
+#include "utils/cluster_test_reader.hpp"
+#include <vector>
+#include <Rcpp.h>
+// gtest test names must be valid C++ identifiers, hence not "4-GaussData"
+TEST(TestReader, GaussData4) {
+
+    ClusterTestReader reader("4-Gauss");
+    std::vector<Rcpp::NumericVector> points;
+    std::vector<int> tags;
+    reader.readData(2, points);
+    reader.readAnswer(tags);
+
+    ASSERT_EQ(points.size(), tags.size());
+}
diff --git a/tests/cpp/cec/not_working/utils/GNUmakefile b/tests/cpp/cec/not_working/utils/GNUmakefile
new file mode 100644
index 00000000..9753c513
--- /dev/null
+++ b/tests/cpp/cec/not_working/utils/GNUmakefile
@@ -0,0 +1,29 @@
+include ../vars.mk
+
+CURRENT_HFILES := $(wildcard *.h)
+
+SRC = ../../../src/
+HFILES = $(wildcard $(SRC)*.hpp)
+OFILES = $(patsubst $(SRC)%.cpp,$(SRC)$(OBJDIR)%.o, $(wildcard $(SRC)*.cpp))
+CURRENT_SRC := $(wildcard *.cpp)
+CURRENT_OBJS := $(patsubst %.cpp,%.o,$(CURRENT_SRC))
+CURRENT_OBJ_FILES = ${CURRENT_OBJS:%=${OBJDIR}%}
+all: main
+fresh: clean main
+
+main: $(CURRENT_OBJ_FILES)
+
+-include ${OBJ_FILES:%=%.d}
+
+$(OBJDIR)%.o: %.cpp $(HFILES) $(CURRENT_HFILES)
+	@mkdir -p $(OBJDIR)
+	$(CXX) $(CXXFLAGS) $(I_PATH) -MT $@ -MD -MP -MF $(@:$(OBJDIR)%=$(OBJDIR)%.d) -c -o $@ $<
+
+
+clean:
+	rm -f $(OBJDIR)*.o $(OBJDIR)*.o.d
+
+debug:
+	@echo $(CURRENT_OBJ_FILES)
+	@echo $(CXX)
+
diff --git a/tests/cpp/cec/not_working/utils/cluster_test_reader.xcpp b/tests/cpp/cec/not_working/utils/cluster_test_reader.xcpp
new file mode 100644
index 00000000..c77ebbb1
--- /dev/null
+++ b/tests/cpp/cec/not_working/utils/cluster_test_reader.xcpp
@@ -0,0 +1,60 @@
+#include "cluster_test_reader.hpp"
+#include <fstream>
+#include <sstream>
+#include <iostream>
+#include <string>
+
+ClusterTestReader::ClusterTestReader(std::string _dir) :
+        dir(_dir) {
+}
+
+std::string ClusterTestReader::getPrefix() {
+    std::string result("data/");
+    result.append(dir);
+    return result;
+}
+
+std::string ClusterTestReader::getDataPath() {
+    return getPrefix() + "data.txt";
+}
+
+std::string ClusterTestReader::getAnswerPath() {
+    return getPrefix() + "answer.txt";
+}
+
+void ClusterTestReader::readData(int dim,
+        std::vector<Rcpp::NumericVector> & out) {
+    std::ifstream file(getDataPath().c_str());
+    if (file.is_open()) {
+        std::string line;
+        // read line by line; the earlier while(file.good()) form also pushed
+        // a spurious entry for the trailing newline
+        while (std::getline(file, line)) {
+            Rcpp::NumericVector v(dim);
+            std::stringstream ss(line);
+            for (int i = 0; i < dim; ++i) {
+                double x;
+                ss >> x;
+                v[i] = x;
+            }
+            out.push_back(v);
+        }
+    } else {
+        std::cout << "Failed to open file " << getDataPath() << std::endl;
+    }
+}
+
+void ClusterTestReader::readAnswer(std::vector<int> & out) {
+    std::ifstream file(getAnswerPath().c_str());
+    if (file.is_open()) {
+        std::string line;
+        while (std::getline(file, line)) {
+            std::stringstream ss(line);
+            int v;
+            ss >> v;
+            out.push_back(v);
+        }
+    } else {
+        std::cout << "Failed to open file " << getAnswerPath() << std::endl;
+    }
+}
diff --git a/tests/cpp/cec/not_working/utils/cluster_test_reader.xhpp b/tests/cpp/cec/not_working/utils/cluster_test_reader.xhpp
new file mode 100644
index 00000000..8e0c50ff
--- /dev/null
+++ b/tests/cpp/cec/not_working/utils/cluster_test_reader.xhpp
@@ -0,0 +1,20 @@
+#ifndef CLUSTERTESTREADER_HPP
+#define CLUSTERTESTREADER_HPP
+
+#include <string>
+#include <vector>
+#include <Rcpp.h>
+
+class ClusterTestReader {
+private:
+    std::string dir;
+    std::string getPrefix();
+    std::string getDataPath();
+    std::string getAnswerPath();
+public:
+    ClusterTestReader(std::string _dir);
+    void readData(int dim, std::vector<Rcpp::NumericVector> & out);
+    void readAnswer(std::vector<int> & out);
+};
+
+#endif
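The next file, online_test.cpp, checks CEC's incremental cluster statistics: after every add_point/remove_point the cluster's mean and covariance must match a batch recomputation (arma::cov(X, 1), i.e. normalized by n) to within 1e-10. A minimal sketch of the standard online update and downdate being verified (illustrative, not the package's ClusterStandard code):

#include <armadillo>

// Welford-style running mean and covariance; cov() matches arma::cov(X, 1).
struct OnlineStats {
    arma::rowvec mean;
    arma::mat m2;     // sum of outer products of deviations from the mean
    unsigned int n;

    explicit OnlineStats(unsigned int dim)
        : mean(dim, arma::fill::zeros), m2(dim, dim, arma::fill::zeros), n(0) {}

    void add(const arma::rowvec& x) {
        ++n;
        arma::rowvec delta = x - mean;   // deviation from the old mean
        mean += delta / n;
        m2 += delta.t() * (x - mean);    // old and new mean together
    }

    void remove(const arma::rowvec& x) { // assumes n >= 2
        arma::rowvec old_mean = mean;
        mean = (mean * double(n) - x) / double(n - 1);
        m2 -= (x - mean).t() * (x - old_mean);
        --n;
    }

    arma::mat cov() const { return m2 / double(n); }
};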
diff --git a/tests/cpp/cec/online_test.cpp b/tests/cpp/cec/online_test.cpp
new file mode 100644
index 00000000..0b26ce86
--- /dev/null
+++ b/tests/cpp/cec/online_test.cpp
@@ -0,0 +1,123 @@
+#include "gtest/gtest.h"
+#include <vector>
+#include <cmath>
+#include <armadillo>
+#include <boost/shared_ptr.hpp>
+#include <boost/make_shared.hpp>
+#include "cluster.hpp"
+using namespace gmum;
+
+TEST(CEC_OnlineFormulas, AddPoint) {
+    //arma_rng::set_seed(0);
+    int n = 100;
+    int dim = 2;
+    int beg = dim + 1;
+    double acceptable_difference = 1e-10;
+
+    std::vector<unsigned int> fits;
+    unsigned id = 1;
+    arma::mat data(n, dim);
+    data.randu();
+    for (int i = 0; i < n; ++i) {
+        fits.push_back(id);
+    }
+
+    arma::mat init_matrix(beg, dim);
+    for (int k = 0; k < beg; ++k) {
+        for (int j = 0; j < dim; ++j) {
+            init_matrix(k, j) = data(k, j);
+        }
+    }
+
+    boost::shared_ptr<ClusterStandard> m =
+            boost::make_shared<ClusterStandard>(id, fits, init_matrix);
+    // Add the point with index i, then compare the online statistics with a
+    // batch recomputation over the first i+1 rows
+    for (int i = beg; i < n - 1; ++i) {
+
+        arma::mat tmp_matrix(i + 1, dim);
+        for (int j = 0; j <= i; ++j) {
+            tmp_matrix.row(j) = data.row(j);
+        }
+        arma::mat covariance = cov(tmp_matrix, 1);
+        arma::mat real_m = mean(tmp_matrix);
+
+        arma::rowvec point(data.row(i));
+        m->add_point(point);
+        ClusterStandard tmp(id, fits, tmp_matrix);
+        arma::rowvec mean_online_difference = m->get_mean() - real_m;
+        arma::mat mean_init_difference = real_m - tmp.get_mean();
+        arma::mat cov_online_difference = m->get_cov_mat(id, fits, tmp_matrix) - covariance;
+        arma::mat cov_init_difference = covariance - tmp.get_cov_mat(id, fits, tmp_matrix);
+
+        EXPECT_EQ(m->size(), tmp.size());
+
+        for (int j = 0; j < dim; ++j) {
+            EXPECT_LT(std::abs(mean_online_difference(j)), acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference;
+            EXPECT_LT(std::abs(mean_init_difference(j)), acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference;
+        }
+
+        for (int j = 0; j < dim; ++j) {
+            for (int k = 0; k < dim; ++k) {
+                EXPECT_LT(std::abs(cov_online_difference(j, k)), acceptable_difference) << " at position (" << j << "," << k << ") differs by more than " << acceptable_difference;
+                EXPECT_LT(std::abs(cov_init_difference(j, k)), acceptable_difference) << " at position (" << j << "," << k << ") differs by more than " << acceptable_difference;
+            }
+        }
+    }
+}
+
+TEST(CEC_OnlineFormulas, removePoint) {
+    //arma_rng::set_seed(0);
+    int n = 100;
+    int dim = 2;
+    int end = dim + 1;
+    double acceptable_difference = 1e-10;
+
+    std::vector<unsigned int> fits;
+    unsigned id = 1;
+    arma::mat data(n, dim);
+    data.randu();
+    for (int i = 0; i < n; ++i) {
+        fits.push_back(id);
+    }
+
+    arma::mat init_matrix(n, dim);
+    for (int k = 0; k < n; ++k) {
+        for (int j = 0; j < dim; ++j) {
+            init_matrix(k, j) = data(k, j);
+        }
+    }
+
+    boost::shared_ptr<ClusterStandard> m =
+            boost::make_shared<ClusterStandard>(id, fits, init_matrix);
+    // Remove the point with index i, then compare the online statistics with
+    // a batch recomputation over the remaining rows
+    for (int i = n - 1; i > end; --i) {
+
+        arma::mat tmp_matrix(i, dim);
+        for (int j = 0; j < i; ++j) {
+            tmp_matrix.row(j) = data.row(j);
+        }
+        arma::mat covariance = cov(tmp_matrix, 1);
+        arma::mat real_m = mean(tmp_matrix);
+
+        arma::rowvec point(data.row(i));
+        m->remove_point(point);
+        ClusterStandard tmp(id, fits, tmp_matrix);
+        arma::rowvec mean_online_difference = m->get_mean() - real_m;
+        arma::mat mean_init_difference = real_m - tmp.get_mean();
+        arma::mat cov_online_difference = m->get_cov_mat(id, fits, tmp_matrix) - covariance;
+        arma::mat cov_init_difference = covariance - tmp.get_cov_mat(id, fits, tmp_matrix);
+
+        EXPECT_EQ(m->size(), tmp.size());
+
+        for (int j = 0; j < dim; ++j) {
+            EXPECT_LT(std::abs(mean_online_difference(j)), acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference;
+            EXPECT_LT(std::abs(mean_init_difference(j)), acceptable_difference) << "at position " << j << " means differ by more than " << acceptable_difference;
+        }
+
+        for (int j = 0; j < dim; ++j) {
+            for (int k = 0; k < dim; ++k) {
+                EXPECT_LT(std::abs(cov_online_difference(j, k)), acceptable_difference) << " at position (" << j << "," << k << ") differs by more than " << acceptable_difference;
+                EXPECT_LT(std::abs(cov_init_difference(j, k)), acceptable_difference) << " at position (" << j << "," << k << ") differs by more than " << acceptable_difference;
+            }
+        }
+    }
+}
+
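parametrized_tests.cpp below relies on gtest's value-parametrized tests: a fixture derived from ::testing::TestWithParam<T>, TEST_P bodies that read GetParam(), and INSTANTIATE_TEST_CASE_P supplying the parameter values. A self-contained sketch of the pattern, with illustrative names not taken from this package:

#include "gtest/gtest.h"

// Each value passed to INSTANTIATE_TEST_CASE_P becomes one test instance.
class EpsTest : public ::testing::TestWithParam<double> {};

TEST_P(EpsTest, IsPositive) {
    EXPECT_GT(GetParam(), 0.0);
}

INSTANTIATE_TEST_CASE_P(Sweep, EpsTest, ::testing::Values(1e-5, 1e-4, 1e-3));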
diff --git a/tests/cpp/cec/parametrized_tests.cpp b/tests/cpp/cec/parametrized_tests.cpp
new file mode 100644
index 00000000..f5b0f210
--- /dev/null
+++ b/tests/cpp/cec/parametrized_tests.cpp
@@ -0,0 +1,140 @@
+#include "tests_fixture.hpp"
+#include "clustering_comparator.hpp"
+#include "cec.hpp"
+
+#include <cmath>
+
+using namespace gmum;
+
+TEST_P(EnergyTests, IsEnergyCorrect)
+{
+    int number_of_times_acceptable = 0;
+
+    for (unsigned int i = 0; i < times; ++i)
+    {
+        CecConfiguration conf;
+        conf.set_params(params);
+        conf.set_algorithm("hartigan");
+        conf.set_seed(seed);
+        CecModel cec(&conf);
+        double diff = std::fabs(cec.get_energy() - expected_energy);
+
+        if( (cec.get_energy() < expected_energy) || (diff <= 10e-5) )
+        {
+            ++number_of_times_acceptable;
+        }
+    }
+    EXPECT_GT(number_of_times_acceptable, times / 2.0);
+}
+
+TEST_P(CoverageTests, IsCoverageCorrect)
+{
+    BestPermutationComparator comparator;
+    int number_of_times_acceptable = 0;
+
+    for (unsigned int i = 0; i < times; ++i)
+    {
+        CecConfiguration conf;
+        conf.set_params(params);
+        conf.set_algorithm("hartigan");
+        conf.set_seed(seed);
+        CecModel cec(&conf);
+        std::vector<unsigned int> clustering = cec.get_assignment();
+        double percentage = comparator.evaluate_clustering(params.nclusters, expected_clustering, clustering);
+
+        if( percentage >= 0.9 )
+        {
+            ++number_of_times_acceptable;
+        }
+    }
+    EXPECT_GT(number_of_times_acceptable, times / 2.0);
+}
+
+TEST_P(BigDataTests, BigDataTest)
+{
+    for (unsigned int i = 0; i < times; ++i)
+    {
+        CecConfiguration conf;
+        conf.set_params(params);
+        conf.set_algorithm("hartigan");
+        conf.set_seed(seed);
+        CecModel cec(&conf);
+        ASSERT_LE(cec.iters(), iterations_limit);
+    }
+}
+
+TEST_P(ControlEpsTests, IsControlEpsBoundaryCaseCorrect)
+{
+    CecConfiguration conf;
+    conf.set_params(params);
+    conf.set_algorithm("hartigan");
+    conf.set_seed(seed);
+    CecModel cec(&conf);
+    int final_nclusters = cec.get_nclusters().back();
+    EXPECT_EQ(final_nclusters, 1);
+}
+
+INSTANTIATE_TEST_CASE_P(CEC_NumericTest, BigDataTests, ::testing::Values(
+    BigDataTestsFixtureParam(ClusterReader("bigData_1"), boost::make_shared<DefaultGmumParams>(3, kstandard, kkmeanspp), 25),
+    BigDataTestsFixtureParam(ClusterReader("bigData_1"), boost::make_shared<DefaultGmumParams>(3, kstandard, krandom), 45),
+
+    BigDataTestsFixtureParam(ClusterReader("bigData_2"), boost::make_shared<DefaultGmumParams>(3, kstandard, kkmeanspp), 25),
+    BigDataTestsFixtureParam(ClusterReader("bigData_2"), boost::make_shared<DefaultGmumParams>(3, kstandard, krandom), 45),
+
+    BigDataTestsFixtureParam(ClusterReader("bigData_3"), boost::make_shared<DefaultGmumParams>(3, kstandard, kkmeanspp), 25),
+    BigDataTestsFixtureParam(ClusterReader("bigData_3"), boost::make_shared<DefaultGmumParams>(3, kstandard, krandom), 45),
+
+    BigDataTestsFixtureParam(ClusterReader("bigData_4"), boost::make_shared<DefaultGmumParams>(3, kstandard, kkmeanspp), 25),
+    BigDataTestsFixtureParam(ClusterReader("bigData_4"), boost::make_shared<DefaultGmumParams>(3, kstandard, krandom), 45)
+));
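// Sketch (illustrative names, not the package's BestPermutationComparator):
// CoverageTests above score a clustering as the best fraction of labels that
// match the expected assignment over all permutations of cluster ids, and
// require at least 0.9 in a majority of runs. The idea:

#include <algorithm>
#include <vector>

double best_permutation_accuracy(unsigned int k,
                                 const std::vector<unsigned int>& expected,
                                 const std::vector<unsigned int>& got) {
    std::vector<unsigned int> perm(k);
    for (unsigned int i = 0; i < k; ++i) perm[i] = i;
    double best = 0.0;
    do {
        unsigned int hits = 0;
        for (size_t i = 0; i < got.size(); ++i)
            if (perm[got[i]] == expected[i]) ++hits;   // relabel and compare
        best = std::max(best, hits / double(got.size()));
    } while (std::next_permutation(perm.begin(), perm.end()));
    return best;
}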
+INSTANTIATE_TEST_CASE_P(CEC, EnergyTests, ::testing::Values(
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, krandom), 20),
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, kkmeanspp), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(krandom), 20)
+));
+
+INSTANTIATE_TEST_CASE_P(CEC, CoverageTests, ::testing::Values(
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, krandom), 20),
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, kkmeanspp), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(krandom), 20)
+));
+
+INSTANTIATE_TEST_CASE_P(CEC, ControlEpsTests, ::testing::Values(
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1"), boost::make_shared<DefaultGmumParams>(3, kspherical, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("EllipseGauss"), boost::make_shared<DefaultGmumParams>(4, kstandard, krandom), 20),
+
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, krandom), 20),
+    TestsFixtureParam(ClusterReader("simple_1"), boost::make_shared<DefaultGmumParams>(1, kstandard, kkmeanspp), 20),
+
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(kkmeanspp), 20),
+    TestsFixtureParam(ClusterReader("mouse_1_spherical"), boost::make_shared<MixTypeParamsThreeSpheres>(krandom), 20)
+));
diff --git a/tests/cpp/cec/test_data_dirs_list b/tests/cpp/cec/test_data_dirs_list
new file mode 100644
index 00000000..c19710b2
--- /dev/null
+++ b/tests/cpp/cec/test_data_dirs_list
@@ -0,0 +1,12 @@
+bigData_1
+bigData_2
+bigData_3
+bigData_4
+UCI/connect_4
+UCI/covtype
+UCI/glass
+UCI/iris
+UCI/pendigits
+UCI/poker
+UCI/vowel
+UCI/wine
diff --git a/tests/cpp/cec/test_data_list b/tests/cpp/cec/test_data_list
new file mode 100644
index
00000000..a982d765 --- /dev/null +++ b/tests/cpp/cec/test_data_list @@ -0,0 +1,36 @@ +bigData_1/input.txt +bigData_1/type.txt +bigData_1/dimension.txt +bigData_2/input.txt +bigData_2/type.txt +bigData_2/dimension.txt +bigData_3/input.txt +bigData_3/type.txt +bigData_3/dimension.txt +bigData_4/input.txt +bigData_4/type.txt +bigData_4/dimension.txt +UCI/connect_4/input.txt +UCI/connect_4/cluster.txt +UCI/connect_4/dimension.txt +UCI/covtype/input.txt +UCI/covtype/cluster.txt +UCI/covtype/dimension.txt +UCI/glass/input.txt +UCI/glass/cluster.txt +UCI/glass/dimension.txt +UCI/iris/input.txt +UCI/iris/cluster.txt +UCI/iris/dimension.txt +UCI/pendigits/input.txt +UCI/pendigits/cluster.txt +UCI/pendigits/dimension.txt +UCI/poker/input.txt +UCI/poker/cluster.txt +UCI/poker/dimension.txt +UCI/vowel/input.txt +UCI/vowel/cluster.txt +UCI/vowel/dimension.txt +UCI/wine/input.txt +UCI/wine/cluster.txt +UCI/wine/dimension.txt diff --git a/tests/cpp/cec/tests_fixture.cpp b/tests/cpp/cec/tests_fixture.cpp new file mode 100644 index 00000000..6c17aae6 --- /dev/null +++ b/tests/cpp/cec/tests_fixture.cpp @@ -0,0 +1,75 @@ +#include "cec_configuration.hpp" +#include "tests_fixture.hpp" +#include "cluster_params.hpp" + +DefaultGmumParams::DefaultGmumParams(unsigned int nclusters, gmum::ClusterType cluster_type, gmum::AssignmentType assignment_type, int it_max) +{ + gmum_params.nclusters = nclusters; + gmum_params.cluster_type = cluster_type; + gmum_params.assignment_type = assignment_type; + gmum_params.it_max = it_max; +} + +MixTypeParamsThreeSpheres::MixTypeParamsThreeSpheres(gmum::AssignmentType assignment_type, int it_max) + : DefaultGmumParams(3, gmum::kmix, assignment_type, it_max) +{ + gmum_params.clusters.push_back(boost::make_shared(gmum::kspherical)); + gmum_params.clusters.push_back(boost::make_shared(gmum::kspherical)); + gmum_params.clusters.push_back(boost::make_shared(gmum::kspherical)); +} + +TestsFixtureParam::TestsFixtureParam(ClusterReader _reader, boost::shared_ptr _params, unsigned int _times) + : reader(_reader), default_params(_params), times(_times) +{ } + +BigDataTestsFixtureParam::BigDataTestsFixtureParam(ClusterReader _reader, boost::shared_ptr _params, unsigned int _iterations_limit, unsigned int _times) + : TestsFixtureParam(_reader, _params, _times), iterations_limit(_iterations_limit) +{ + gmum::Params& params = default_params->gmum_params; + params.it_max = _iterations_limit + 10; +} + +void TestsFixture::SetUp() +{ + TestsFixtureParam p(GetParam()); + params = p.default_params->gmum_params; + params.dataset = p.reader.get_points_in_matrix(); + params.nstart = 3; + times = p.times; + seed = 13371337; +} + +void BigDataTests::SetUp() +{ + BigDataTestsFixtureParam p(GetParam()); + params = p.default_params->gmum_params; + params.dataset = p.reader.get_points_in_matrix(); + params.nstart = 1; + iterations_limit = p.iterations_limit; + times = p.times; + seed = 13371337; +} + +void CoverageTests::SetUp() +{ + TestsFixture::SetUp(); + TestsFixtureParam p(GetParam()); + p.reader.get_clustering(expected_clustering); +} + +void EnergyTests::SetUp() +{ + TestsFixture::SetUp(); + TestsFixtureParam p(GetParam()); + expected_energy = p.reader.get_energy(); +} + +void ControlEpsTests::SetUp() +{ + TestsFixture::SetUp(); + params.kill_threshold = (params.dataset.n_rows - 1) / static_cast(params.dataset.n_rows); + params.log_nclusters = true; +} + + + diff --git a/tests/cpp/cec/tests_fixture.hpp b/tests/cpp/cec/tests_fixture.hpp new file mode 100644 index 00000000..88902b82 --- /dev/null +++ 
b/tests/cpp/cec/tests_fixture.hpp @@ -0,0 +1,82 @@ +#ifndef TESTS_FIXTURE_H +#define TESTS_FIXTURE_H + +#include +#include +#include "cluster_reader.hpp" +#include "params.hpp" + +struct DefaultGmumParams +{ + DefaultGmumParams(unsigned int nclusters, + gmum::ClusterType cluster_type, + gmum::AssignmentType assignment_type = gmum::kkmeanspp, + int it_max = -1); + + gmum::Params gmum_params; +}; + +struct MixTypeParamsThreeSpheres : public DefaultGmumParams +{ + MixTypeParamsThreeSpheres(gmum::AssignmentType assignment_type, int it_max = -1); +}; + +struct TestsFixtureParam +{ + TestsFixtureParam(ClusterReader _reader, boost::shared_ptr _params, unsigned int _times); + + ClusterReader reader; + boost::shared_ptr default_params; + unsigned int times; +}; + +class TestsFixture : public ::testing::TestWithParam< TestsFixtureParam > +{ +public: + virtual void SetUp(); +protected: + gmum::Params params; + unsigned int times; + int seed; +}; + +class EnergyTests : public TestsFixture +{ +public: + virtual void SetUp(); +protected: + double expected_energy; +}; + +class CoverageTests : public TestsFixture +{ +public: + virtual void SetUp(); +protected: + std::vector expected_clustering; +}; + +class ControlEpsTests : public TestsFixture +{ +public: + virtual void SetUp(); +}; + +struct BigDataTestsFixtureParam : public TestsFixtureParam +{ + BigDataTestsFixtureParam(ClusterReader _reader, boost::shared_ptr _params, unsigned int _iterations_limit, unsigned int _times = 1); + unsigned int iterations_limit; +}; + +class BigDataTests : public ::testing::TestWithParam< BigDataTestsFixtureParam > +{ +public: + virtual void SetUp(); +protected: + gmum::Params params; + unsigned int iterations_limit; + unsigned int times; + int seed; +}; + +#endif // TESTS_FIXTURE_H diff --git a/tests/cpp/gng/basic_integration_tests.cpp b/tests/cpp/gng/basic_integration_tests.cpp new file mode 100644 index 00000000..e99a53f6 --- /dev/null +++ b/tests/cpp/gng/basic_integration_tests.cpp @@ -0,0 +1,256 @@ +#include "gng/gng.h" +#include "gng/gng_server.h" +#include "utils/utils.h" + +#include "gtest/gtest.h" + +#include +#include +#include +#include + +using namespace std; +using namespace gmum; + +unsigned int sleep_ms = 500; + +/** Run GNGAlgorithm on a cube (3-dimensional) with given parameters + * @returns pair : nodes, mean_error + */ +pair test_convergence(GNGConfiguration * cnf = 0, + int num_database = 1000, int ms_loop = 5000, string save_filename = "", + double* extra_examples = 0, int extra_samples_size = 0, + string load_filename = "") { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = true; + + if (cnf) + config = *cnf; + config.datasetType = GNGConfiguration::DatasetSeq; + + GNGServer * s; + + if (load_filename != "") { + cerr << "Loading everything from file!\n"; + s = new GNGServer(load_filename); + } else { + s = GNGServer::constructTestServer(config); + } + + cerr << s->getGraph().reportPool() << endl; + + s->run(); + + cerr << "Allocating " << (config.dim) * num_database << endl << flush; + double * vect = new double[(config.dim) * num_database]; + for (int i = 0; i < num_database; ++i) { + for (int j = 0; j <= config.dim; ++j) + if (j == 0) + vect[j + (i) * (config.dim)] = 0.0; + else if (j < config.dim) + vect[j + (i) * (config.dim)] = __double_rnd(0, 1); + } + + cerr << "Allocated examples\n"; + + if (extra_examples) { + cerr << "Adding extra examples\n"; + s->insertExamples(extra_examples, 0, 0, + extra_samples_size / (config.dim), 
config.dim); + } + + cerr << "Adding main examples\n"; + s->insertExamples(vect, 0, 0, num_database, config.dim); + + cerr << "Dimensionality of example is " << s->getDatabase().getDataDim() + << endl; + + for (int i = 0; i < 10; ++i) { + cerr << "Exemplary sample (testing memory correctness):\n"; + int ex = s->getDatabase().drawExample(); + write_array(s->getDatabase().getPosition(ex), + s->getDatabase().getPosition(ex) + (config.dim + 1)); + } + + + + cerr << "testNewInterface::Collecting results\n"; + + int iteration = 0; + + while (true) { + ++iteration; + + gmum::sleep(sleep_ms); + if (iteration >= ms_loop / sleep_ms) + break; + } + + s->terminate(); + + gmum::sleep(sleep_ms); + + pair t = pair( + s->getGraph().get_number_nodes(), s->getMeanError()); + if (save_filename != "") { + cerr << "GNGNumericTest::Saving to GraphML\n"; + writeToGraphML(s->getGraph(), save_filename); + } + + delete s; + return t; +} + +TEST(GNGNumericTest, BasicConvergenceUtility) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.experimental_utility_option = GNGConfiguration::UtilityBasicOn; + config.verbosity = 3; + + pair results = test_convergence(&config, 1000, 6000); + ASSERT_GE(fabs(results.first), 60.0); + ASSERT_LE(fabs(results.second), 1e-4); +} + +TEST(GNGNumericTest, Serialization) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = true; + int num_database = 4000; + int ms_loop = 10000; + + GNGServer *s = GNGServer::constructTestServer(config); + + s->run(); + + cerr << "Allocating " << (config.dim) * num_database << endl << flush; + double * vect = new double[(config.dim) * num_database]; + for (int i = 0; i < num_database; ++i) { + for (int j = 0; j <= config.dim; ++j) + if (j == 0) + vect[j + (i) * (config.dim)] = 0.0; + else if (j < config.dim) + vect[j + (i) * (config.dim)] = __double_rnd(0, 1); + + } + + cerr << "Adding main examples\n"; + s->insertExamples(&vect[0], 0, 0, num_database, config.dim); + cerr << "Dimensionality of example is " << s->getDatabase().getDataDim() + << endl; + for (int i = 0; i < 10; ++i) { + cerr << "Exemplary sample (testing memory correctness):\n"; + int ex = s->getDatabase().drawExample(); + write_array(s->getDatabase().getPosition(ex), + s->getDatabase().getPosition(ex) + (config.dim + 1)); + } + + cerr << "testNewInterface::Collecting results\n"; + + int iteration = 0; + + while (true) { + ++iteration; + gmum::sleep(sleep_ms); + if (iteration >= ms_loop / sleep_ms) + break; + } + + s->terminate(); + gmum::sleep(sleep_ms); + + s->save("test_serialization.bin"); + delete s; + + pair results = test_convergence(&config, 1000, 1000, + "" /*save filename*/, 0 /*extra examples*/, 0 /*extra_sample_size*/, + "test_serialization.bin" /*load_filename*/); + + ASSERT_GE(fabs(results.first), 550.0); + ASSERT_LE(fabs(results.second), 1e-2); +} + +TEST(GNGNumericTest, BasicConvergence) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + pair results = test_convergence(&config, 1000, 5000, + "basic_convergence.graphml"); + ASSERT_GE(fabs(results.first), 60.0); + ASSERT_LE(fabs(results.second), 1e-4); +} + +TEST(GNGNumericTest, FewDimsSkewedUGConvergence) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = true; + config.max_nodes = 1000; + config.lazyheap_optimization = true; + config.dim = 5; + config.axis = vector(config.dim, 20.0); + config.orig = vector(config.dim, -1.0); + config.orig[2] = -4.0; + + 
//vector would be better here obviously. + int num_extra = 50000; + double * extra_examples = new double[num_extra * (config.dim + 1)]; + for (int i = 0; i < num_extra; ++i) { + for (int j = 0; j <= config.dim; ++j) + extra_examples[j + (i) * (config.dim + 1)] = __double_rnd(0, 2) + + (2.0); + } + + pair results = test_convergence(&config, 100000, 60000, + "fewdims.graphml", extra_examples, num_extra * (config.dim + 1)); + + ASSERT_GE(results.first, 10.0); + ASSERT_LE(fabs(results.second), 3e-1); +} + +TEST(GNGNumericTest, FewDimsUGConvergence) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = true; + config.max_nodes = 2000; + config.verbosity = 8; + config.lazyheap_optimization = true; + config.dim = 4; + config.setBoundingBox(0.0, 1.0); + + pair results = test_convergence(&config, 1000, 60000, + "fewdimsugconvergence.graphml"); + + ASSERT_GE(results.first, 10.0); + ASSERT_LE(fabs(results.second), 1e-4); +} +TEST(GNGNumericTest, ManyDimsUGConvergence) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = true; + config.lazyheap_optimization = true; + config.dim = 10; + config.setBoundingBox(0.0, 1.0); + + pair results = test_convergence(&config, 100, 3000); + + ASSERT_GE(results.first, 10.0); + ASSERT_LE(fabs(results.second), 1e-4); +} + +TEST(GNGNumericTest, ManyDimsNoUG) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.uniformgrid_optimization = false; + config.dim = 50; + config.setBoundingBox(0.0, 1.0); + + pair results = test_convergence(&config, 100, 50000); + + ASSERT_GE(fabs(results.first), 100.0); + ASSERT_LE(fabs(results.second), 1e-20); +} + +TEST(GNGNumericTest, BasicConvergeLazyHeapUG) { + GNGConfiguration config = GNGConfiguration::getDefaultConfiguration(); + config.lazyheap_optimization = true; + config.max_nodes = 2000; + config.uniformgrid_optimization = true; + cerr << "Checking correctness " + to_string(config.check_correctness()) + << endl; + config.check_correctness(); + pair results = test_convergence(&config, 10000, 1000); + ASSERT_GE(results.first, 10.0); + ASSERT_LE(fabs(results.second), 1e-2); +} diff --git a/tests/cpp/gng/parts_tests.cpp b/tests/cpp/gng/parts_tests.cpp new file mode 100644 index 00000000..8890fd55 --- /dev/null +++ b/tests/cpp/gng/parts_tests.cpp @@ -0,0 +1,207 @@ +#include "gng/gng.h" +#include "utils/utils.h" +#include "gng/gng_graph.h" +#include "gng/gng_dataset.h" +#include "utils/threading.h" + +#include +#include +#include + +#include "gtest/gtest.h" +/** Include all tests */ + +using namespace std; +using namespace gmum; +/* + * Basic test + */ +TEST(GraphTests, BasicGraphTest) { + boost::shared_ptr logger = boost::shared_ptr( + new Logger(10)); + cerr << "Testing GraphTest\n"; + + int N_start = 30; + + unsigned int dim = 6; + + gmum::recursive_mutex grow_mutex; + RAMGNGGraph g(&grow_mutex, dim, N_start, //Initial pool size + GNGGraph::Euclidean, //Used metric + logger); //Logger + + ASSERT_EQ(g.existsNode(0), false); + ASSERT_EQ(g.get_number_nodes(), 0); + + double x[6] = { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6 }; + + //Just fill in + for (int i = 0; i < N_start; ++i) { + int idx = g.newNode(x); + ASSERT_EQ(idx, i); + } + + //Enforce growing + x[0] *= 100; + + cerr << "Getting size before regrowing\n" << g[0].size() << " " + << g[0].capacity() << endl << flush; + + g.deleteNode(2); + cerr << "Checking writing before growing\n"; + string graphml = writeToGraphML(g); + + int idx = g.newNode(x); + 
ASSERT_EQ(idx, 2); + + ASSERT_EQ(g[N_start - 2].nr, N_start - 2); //Check memory consistency + ASSERT_EQ(g.newNode(x), N_start); + ASSERT_EQ(g.get_number_nodes(), N_start + 1); + ASSERT_EQ(g.newNode(x), N_start + 1); + ASSERT_EQ(g.get_number_nodes(), N_start + 2); + + ASSERT_EQ(g.get_maximum_index(), N_start + 1); + + g.deleteNode(10); + g.deleteNode(15); + g.deleteNode(20); + + ASSERT_EQ(g.get_number_nodes(), N_start - 1); + + /** WARNING_LEVEL: highly intrusive test ! Can change implementation*/ + ASSERT_EQ(g.first_free, 20); + ASSERT_EQ(g.next_free[g.first_free], 15); //might fail if not doubling + ASSERT_EQ(g.next_free[15], 10); + + g.addUDEdge(0, 1); + g.addUDEdge(0, 2); + g.addUDEdge(2, 5); + + ASSERT_EQ(g.isEdge(0, 2), true); + g.removeUDEdge(0, 2); + ASSERT_EQ(g.isEdge(0, 1), true); + ASSERT_EQ(g.isEdge(0, 2), false); + ASSERT_EQ(g[0].size(), 1); + ASSERT_EQ(g[5].size(), 1); + + for (int i = 0; i < g.get_maximum_index(); ++i) { + if (g.existsNode(i)) + ASSERT_EQ(g[i].position[1], 0.2); + } + + graphml = writeToGraphML(g); + + //Check memory consistency + ASSERT_EQ(g[0].position[3], 0.4); + + DBG(logger,10, "Test OK"); + + //Check regrowing + for (int i = 0; i < 20 * N_start; ++i) { + int idx = g.newNode(x); + int cn = __int_rnd(0, i); + while (!g.existsNode(cn)) { + cn = __int_rnd(0, i); + } + g.addUDEdge(idx, cn); + } + + string pool_before = g.reportPool(); + + set edges_first_10_bef; + for (int i = 0; i < 10; ++i) { + if (g.existsNode(i)) + for (int k = 0; k < g[i].size(); ++k) { + edges_first_10_bef.insert(g[i][k]->nr); + } + } + vector serialized_edges_first_10_bef; + std::copy(edges_first_10_bef.begin(), edges_first_10_bef.end(), + std::back_inserter(serialized_edges_first_10_bef)); + + std::ofstream output; + output.open("graph.bin", ios::out | ios::binary); + + g.serialize(output); + + RAMGNGGraph g2(&grow_mutex, dim, N_start, + GNGGraph::Euclidean, logger); + output.close(); + + std::ifstream input; + input.open("graph.bin", ios::in | ios::binary); + + g2.load(input); + + string pool_after = g2.reportPool(); + + set edges_first_10_aft; + for (int i = 0; i < 10; ++i) { + if (g2.existsNode(i)) + for (int k = 0; k < g2[i].size(); ++k) { + edges_first_10_aft.insert(g2[i][k]->nr); + } + } + vector serialized_edges_first_10_aft; + std::copy(edges_first_10_aft.begin(), edges_first_10_aft.end(), + std::back_inserter(serialized_edges_first_10_aft)); + + ASSERT_TRUE( + std::equal(serialized_edges_first_10_aft.begin(), + serialized_edges_first_10_aft.end(), + serialized_edges_first_10_bef.begin())); + + for (int i = 0; i < 1000; ++i) { + int id = g2.newNode(x); + g2.addUDEdge(id, 0); + } + + g2.reportPool(); +} + +TEST(DatabaseTests, BasicDatasetTest) { + boost::shared_ptr logger = boost::shared_ptr( + new Logger(10)); + int m_verbosity = 3; + + unsigned int dim = 6; + gmum::recursive_mutex phase_2_lock; + unsigned int num_examples = 100, meta_data_dim = 1; + GNGDatasetSimple dataset2(&phase_2_lock, dim, + true /* store_extra */, + GNGDatasetSimple::SamplingProbability, 777, logger); + + double * x = new double[num_examples * (dim)]; + double * labels = new double[num_examples]; + double * probabilities = new double[num_examples]; + + + for (int i = 0; i < num_examples; ++i) { + for (int j = 0; j < dim; ++j) { + x[i * dim + j] = 0.2; + } + labels[i] = i; + probabilities[i] = 0.3; + } + dataset2.insertExamples(x, labels, probabilities, num_examples); + + + for (int i = 0; i < num_examples; ++i) { + for (int j = 0; j < dim; ++j) { + x[i * dim + j] = j / (2 * (float) dim); + } + 
labels[i] = i - 2;
+        probabilities[i] = 0.9;
+    }
+    dataset2.insertExamples(x, labels, probabilities, num_examples);
+
+    for (int i = 0; i < 1000; ++i) {
+        unsigned int a = dataset2.drawExample();
+        unsigned int b = dataset2.drawExample();
+        unsigned int c = dataset2.drawExample();
+
+        ASSERT_LE(dataset2.getPosition(a)[0], 0.9);
+        ASSERT_LE(dataset2.getPosition(b)[0], 0.9);
+        ASSERT_LE(dataset2.getPosition(c)[0], 0.9);
+    }
+}
diff --git a/tests/cpp/svm/2_e_test.cpp b/tests/cpp/svm/2_e_test.cpp
new file mode 100644
index 00000000..cc888d47
--- /dev/null
+++ b/tests/cpp/svm/2_e_test.cpp
@@ -0,0 +1,81 @@
+#include "gtest/gtest.h"
+#include <armadillo>
+#include "libsvm_runner.h"
+#include "svm_basic.h"
+#include "two_e_svm_post.h"
+#include "two_e_svm_pre.h"
+#include "svm_utils.h"
+
+using namespace arma;
+
+TEST(SVMUtilsTest, SqrtInvOfMatrix) {
+    double epsilon = 0.05;
+    mat A;
+    A << 3 << 4 << 4 << endr
+      << 1 << 3 << 4 << endr
+      << 5 << 3 << 2 <
+#include "libsvm_runner.h"
+#include "svm_basic.h"
+#include "svm_client.h"
+#include "svm_helpers.h"
+
+using namespace arma;
+
+//TEST(LibSVMRunnerTest, TrainingFilePredictionFile) {
+//	SVMConfiguration svm_config;
+//	mat A;
+//	A << 0 << 0 << endr << 1 << 1 << endr;
+//	vec b;
+//	b << -1 << 1;
+//	svm_config.setPrediction(false); // training model
+//	svm_config.data = A;
+//	svm_config.target = b;
+//	svm_config.setModelFilename("bika.txt");
+//	LibSVMRunner lib_svm_runner;
+//	lib_svm_runner.processRequest(svm_config);
+//	svm_config.setPrediction(true);
+//	svm_config.setFilename("test.txt");
+//	svm_config.setOutputFilename("test.txt");
+//	lib_svm_runner.processRequest(svm_config);
+//}
+//
+//TEST(LibSVMRunnerTest, ArmaTrainingPredictionFile) {
+//	using namespace arma;
+//	SVMConfiguration svm_config;
+//	mat A;
+//	A << 0 << 0 << endr << 1 << 1 << endr;
+//	vec b;
+//	b << -1 << 1;
+//	svm_config.setPrediction(false); // training model
+//	svm_config.data = A;
+//	svm_config.target = b;
+//	svm_config.setModelFilename("bika.txt");
+//	LibSVMRunner lib_svm_runner;
+//	lib_svm_runner.processRequest(svm_config);
+//	svm_config.setPrediction(true);
+//	svm_config.setFilename("test.txt");
+//	svm_config.setOutputFilename("test.txt");
+//	lib_svm_runner.processRequest(svm_config);
+//}
+//
+//TEST(LibSVMRunnerTest, ArmaTrainingArmaPrediction) {
+//
+//	SVMConfiguration svm_config;
+//	mat A;
+//	A << 0 << 0 << endr << 1 << 1 << endr;
+//	vec b;
+//	b << -1 << 1;
+//	svm_config.setPrediction(false); // training model
+//	svm_config.data = A;
+//	svm_config.target = b;
+//	svm_config.setModelFilename("svm_test_model.txt");
+//	LibSVMRunner lib_svm_runner;
+//	lib_svm_runner.processRequest(svm_config);
+//
+//	svm_config.setPrediction(true);
+//	lib_svm_runner.processRequest(svm_config);
+//
+//	ASSERT_EQ(-1.0, svm_config.result[0]);
+//	ASSERT_EQ(1.0, svm_config.result[1]);
+//}
+
+TEST(LibSVMRunnerTest, ArmaTrainingConfigModel) {
+
+    SVMConfiguration svm_config;
+    mat A;
+    A << 0 << 0 << endr << 1 << 1 << endr;
+    vec b;
+    b << -1 << 1;
+    svm_config.setPrediction(false); // training model
+    svm_config.data = A;
+    svm_config.target = b;
+    LibSVMRunner lib_svm_runner;
+    lib_svm_runner.processRequest(svm_config);
+
+//	svm_config.setPrediction(true);
+//	lib_svm_runner.processRequest(svm_config);
+//
+//	ASSERT_EQ(-1.0, svm_config.result[0]);
+//	ASSERT_EQ(1.0, svm_config.result[1]);
+}
+
+TEST(LibSVMRunnerTest, ArmaTrainingArmaPredictionConfigModel2x2) {
+    SVMConfiguration svm_config;
+    mat A;
+    A << 0 << 0 << endr << 1 << 1 << endr;
+    vec b;
+    b << -1
<< 1; + svm_config.setPrediction(false); // training model + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + + svm_config.setPrediction(true); + lib_svm_runner.processRequest(svm_config); + + ASSERT_EQ(-1.0, svm_config.result[0]); + ASSERT_EQ(1.0, svm_config.result[1]); +} + +TEST(LibSVMRunnerTest, DoubleArmaTrainingDoubleArmaPredictionConfigModel4x2) { + + SVMConfiguration svm_config; + mat A; + A << 0 << 0 << endr + << 1 << 1 << endr + << 2 << 2 << endr + << 3 << 3 << endr; + vec b; + b << -1 << -1 << 1 << 1; + svm_config.setPrediction(false); // training model + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + lib_svm_runner.processRequest(svm_config); + + svm_config.setPrediction(true); + lib_svm_runner.processRequest(svm_config); + lib_svm_runner.processRequest(svm_config); + + ASSERT_EQ(-1.0, svm_config.result[0]); + ASSERT_EQ(-1.0, svm_config.result[1]); + ASSERT_EQ(1.0, svm_config.result[2]); + ASSERT_EQ(1.0, svm_config.result[3]); +} + +TEST(LibSVMRunnerTest, ArmaTrainingArmaPredictionConfigModel4x2) { + + SVMConfiguration svm_config; + mat A; + A << 0 << 0 << endr + << 1 << 1 << endr + << 2 << 2 << endr + << 3 << 3 << endr; + vec b; + b << -1 << -1 << 1 << 1; + svm_config.setPrediction(false); // training model + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + + svm_config.setPrediction(true); + lib_svm_runner.processRequest(svm_config); + + ASSERT_EQ(-1.0, svm_config.result[0]); + ASSERT_EQ(-1.0, svm_config.result[1]); + ASSERT_EQ(1.0, svm_config.result[2]); + ASSERT_EQ(1.0, svm_config.result[3]); +} + +TEST(LibSVMRunnerTest, ArmaTrainingArmaPredictionConfigModel4x2OtherPoints) { + + SVMConfiguration svm_config; + mat A; + A << 0 << 0 << endr + << 1 << 1 << endr + << 2 << 2 << endr + << 3 << 3 << endr; + vec b; + b << -1 << -1 << 1 << 1; + svm_config.setPrediction(false); // training model + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + + mat A2; + A2 << 0 << 0 << endr + << 30 << 40 << endr + << 50 << 60 << endr + << -1 << -1 << endr; + + vec b2; + b2 << -1 << 1 << 1 << -1; + + svm_config.data = A2; + svm_config.target = b2; + + svm_config.setPrediction(true); + lib_svm_runner.processRequest(svm_config); + + ASSERT_EQ(-1.0, svm_config.result[0]); + ASSERT_EQ(1.0, svm_config.result[1]); + ASSERT_EQ(1.0, svm_config.result[2]); + ASSERT_EQ(-1.0, svm_config.result[3]); +} + +TEST(LibSVMRunnerTest, integration_svmclient_predict) { + SVMConfiguration svm_config; + arma::mat A = helper_learning_data_01(); + arma::mat b = helper_learning_target_01(); + svm_config.setPrediction(false); // training model + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + + std::cout << "Testing SVMClient prediction..." << std::endl << std::flush; + svm_config.data = helper_testing_data_01(); + svm_config.setPrediction(true); + SVMClient *svm_client = new SVMClient(&svm_config); + svm_client->predict(svm_config.data); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < 4; ++i) { + ASSERT_DOUBLE_EQ( + client_config.result[i], helper_testing_target_01()[i]); + } +} + +TEST(LibSVMRunnerTest, svm_options_01) { + std::cout << "Learning..." 
<< std::endl << std::flush; + SVMConfiguration svm_config; + arma::mat A = helper_learning_data_01(); + arma::mat b = helper_learning_target_01(); + svm_config.setPrediction(false); // training model + svm_config.svm_options = "-c 0.24"; + svm_config.data = A; + svm_config.target = b; + LibSVMRunner lib_svm_runner; + lib_svm_runner.processRequest(svm_config); + + std::cout << "Testing SVMClient prediction..." << std::endl << std::flush; + svm_config.data = helper_testing_data_01(); + svm_config.setPrediction(true); + SVMClient *svm_client = new SVMClient(&svm_config); + svm_client->predict(svm_config.data); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < 4; ++i) { + // NOTE: `svm-predict` after `svm-train -c 0.24` predicts 1 + // (incorrectly) for all of this data + ASSERT_DOUBLE_EQ(client_config.result[i], 1); + } +} diff --git a/tests/cpp/svm/svm_client_test.cpp b/tests/cpp/svm/svm_client_test.cpp new file mode 100644 index 00000000..54173579 --- /dev/null +++ b/tests/cpp/svm/svm_client_test.cpp @@ -0,0 +1,101 @@ +#include +#include +#include "gtest/gtest.h" + +#include "svm_helpers.h" +#include "svm_client.h" +#include "utils/logger.h" + +namespace { + +int log_level = LogLevel::DEBUG_LEVEL; + +// The fixture +class SVMClientTest: public ::testing::Test { + +protected: + + SVMClientTest() { + std::cout << "Creating SVMConfiguration..." << std::endl << std::flush; + svm_config = SVMConfiguration(); + std::cout << "Setting logger..." << std::endl << std::flush; + svm_config.log.verbosity = log_level; + + // Exemplary sparse matrix + sparse_matrix_csc_01_row_indices = + helper_sparse_matrix_csc_01_row_indices(); + sparse_matrix_csc_01_column_pointers = + helper_sparse_matrix_csc_01_column_pointers(); + sparse_matrix_csc_01_values = + helper_sparse_matrix_csc_01_values(); + sparse_matrix_csc_01_nrow = + helper_sparse_matrix_csc_01_nrow(); + sparse_matrix_csc_01_ncol = + helper_sparse_matrix_csc_01_ncol(); + sparse_matrix_csc_01_sp_mat = + helper_sparse_matrix_csc_01_sp_mat(); + sparse_matrix_csc_01_learning_target = + helper_sparse_matrix_csc_01_learning_target(); + + std::cout << "Starting test..." << std::endl << std::flush; + } + + virtual ~SVMClientTest() {} + + /// Called immediately after the constructor (righ before each test) + virtual void SetUp() {} + + /// Called immediately after each test (right before the destructor) + virtual void TearDown() {} + + /* Objects declared here can be used by all tests in the test case */ + + SVMConfiguration svm_config; + + arma::uvec sparse_matrix_csc_01_row_indices; + arma::uvec sparse_matrix_csc_01_column_pointers; + arma::vec sparse_matrix_csc_01_values; + size_t sparse_matrix_csc_01_nrow; + size_t sparse_matrix_csc_01_ncol; + arma::sp_mat sparse_matrix_csc_01_sp_mat; + arma::vec sparse_matrix_csc_01_learning_target; +}; + + +} // end namespace + +/* Fixture tests */ + +TEST_F(SVMClientTest, DISABLED_sparse_data_test) { + std::cout << "Testing learning..." << std::endl << std::flush; + svm_config.setSparseData( + sparse_matrix_csc_01_row_indices, + sparse_matrix_csc_01_column_pointers, + sparse_matrix_csc_01_values, + sparse_matrix_csc_01_nrow, + sparse_matrix_csc_01_ncol, + true + ); + //svm_config.sparse_data = sparse_matrix_csc_01_sp_mat; + svm_config.target = sparse_matrix_csc_01_learning_target; + svm_config.setLibrary("libsvm"); + SVMClient *svm_client = new SVMClient(&svm_config); + // XXX: Why access violation here? + svm_client->train(); + + std::cout << "Testing prediction..." 
<< std::endl << std::flush; + svm_client->sparse_predict( + sparse_matrix_csc_01_row_indices, + sparse_matrix_csc_01_column_pointers, + sparse_matrix_csc_01_values, + sparse_matrix_csc_01_nrow, + sparse_matrix_csc_01_ncol + ); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < sparse_matrix_csc_01_learning_target.n_rows; ++i) { + ASSERT_DOUBLE_EQ( + client_config.result[i], sparse_matrix_csc_01_learning_target[i]); + } +} + diff --git a/tests/cpp/svm/svm_configuration_test.cpp b/tests/cpp/svm/svm_configuration_test.cpp new file mode 100644 index 00000000..3b619a06 --- /dev/null +++ b/tests/cpp/svm/svm_configuration_test.cpp @@ -0,0 +1,85 @@ +#include +#include +#include "gtest/gtest.h" + +#include "svm_helpers.h" +#include "svm_basic.h" +#include "utils/logger.h" + +namespace { + +int log_level = LogLevel::DEBUG_LEVEL; + +// The fixture +class SVMConfigurationTest: public ::testing::Test { + +protected: + + SVMConfigurationTest() { + std::cout << "Creating SVMConfiguration..." << std::endl << std::flush; + svm_config = SVMConfiguration(); + std::cout << "Setting logger..." << std::endl << std::flush; + svm_config.log.verbosity = log_level; + + // Exemplary sparse matrix + sparse_matrix_csc_01_row_indices = + helper_sparse_matrix_csc_01_row_indices(); + sparse_matrix_csc_01_column_pointers = + helper_sparse_matrix_csc_01_column_pointers(); + sparse_matrix_csc_01_values = + helper_sparse_matrix_csc_01_values(); + sparse_matrix_csc_01_nrow = + helper_sparse_matrix_csc_01_nrow(); + sparse_matrix_csc_01_ncol = + helper_sparse_matrix_csc_01_ncol(); + sparse_matrix_csc_01_sp_mat = + helper_sparse_matrix_csc_01_sp_mat(); + + std::cout << "Starting test..." << std::endl << std::flush; + } + + virtual ~SVMConfigurationTest() {} + + /// Called immediately after the constructor (righ before each test) + virtual void SetUp() {} + + /// Called immediately after each test (right before the destructor) + virtual void TearDown() {} + + /* Objects declared here can be used by all tests in the test case */ + + SVMConfiguration svm_config; + + arma::uvec sparse_matrix_csc_01_row_indices; + arma::uvec sparse_matrix_csc_01_column_pointers; + arma::vec sparse_matrix_csc_01_values; + size_t sparse_matrix_csc_01_nrow; + size_t sparse_matrix_csc_01_ncol; + arma::sp_mat sparse_matrix_csc_01_sp_mat; +}; + + +} // end namespace + +/* Fixture tests */ + +TEST_F(SVMConfigurationTest, setSparseData) { + std::cout << "SVMConfiguration sparse data..." 
<< std::endl; + svm_config.setSparseData( + sparse_matrix_csc_01_column_pointers, + sparse_matrix_csc_01_row_indices, + sparse_matrix_csc_01_values, + sparse_matrix_csc_01_nrow, + sparse_matrix_csc_01_ncol, + true + ); + + + for (size_t i = 0; i < svm_config.sparse_data.n_cols; ++i) { + for (size_t j = 0; j < svm_config.sparse_data.n_rows; ++j) { + // Sparse matrix is currently being held as transposed + ASSERT_EQ(svm_config.sparse_data(i, j), sparse_matrix_csc_01_sp_mat(i, j)); + } + } +} + diff --git a/tests/cpp/svm/svm_helpers.cpp b/tests/cpp/svm/svm_helpers.cpp new file mode 100644 index 00000000..bf4ddd01 --- /dev/null +++ b/tests/cpp/svm/svm_helpers.cpp @@ -0,0 +1,113 @@ + +#include "svm_helpers.h" + +// Useful helpers for multiple test cases + +arma::mat helper_learning_data_01() { + arma::mat matrix; + matrix + << 0.5 << 1.0 << 0.0 << 1.0 << arma::endr + << 0.4 << 1.1 << 0.0 << 1.0 << arma::endr + << 0.5 << 0.9 << 1.0 << 0.0 << arma::endr + << 0.5 << 1.0 << 1.0 << 0.0 << arma::endr + << 0.4 << 1.1 << 1.0 << 0.0 << arma::endr; + return matrix; +} + +arma::vec helper_learning_target_01() { + arma::vec vector; + vector + << -1 << -1 << 1 << 1 << 1; + return vector; +} + +arma::vec helper_learning_target_02() { + arma::vec vector; + vector + << -1 << -1 << 1 << 1 << 1; + return vector; +} + +arma::mat helper_testing_data_01() { + arma::mat matrix; + matrix + << 0.4 << 0.9 << 0.0 << 1.0 << arma::endr + << 0.5 << 0.9 << 0.0 << 1.0 << arma::endr + << 0.4 << 1.0 << 1.0 << 0.0 << arma::endr + << 0.5 << 1.0 << 1.0 << 0.0 << arma::endr; + return matrix; +} + +arma::vec helper_testing_target_01() { + arma::vec vector; + vector + << -1 << -1 << 1 << 1; + return vector; +} + +arma::vec helper_testing_target_02() { + arma::vec vector; + vector + << -1 << -1 << 1 << 1; + return vector; +} + +// Sparse matrix CSC (CCS) data from: +// http://netlib.org/linalg/html_templates/node92.html + +arma::uvec helper_sparse_matrix_csc_01_row_indices() { + arma::uvec vector; + vector + << 1 << 2 << 4 << 2 << 3 << 5 << 6 << 3 << 4 << 3 << 4 << 5 << 1 << 4 + << 5 << 6 << 2 << 5 << 6; + return vector; +} + +arma::uvec helper_sparse_matrix_csc_01_column_pointers() { + arma::uvec vector; + vector + << 1 << 4 << 8 << 10 << 13 << 17 << 20; + return vector; +} + +arma::vec helper_sparse_matrix_csc_01_values() { + arma::vec vector; + vector + << 10 << 3 << 3 << 9 << 7 << 8 << 4 << 8 << 8 << 7 << 7 << 9 << -2 << 5 + << 9 << 2 << 3 << 13 << -1; + return vector; +} + +size_t helper_sparse_matrix_csc_01_nrow() { + return 6; +} + +size_t helper_sparse_matrix_csc_01_ncol() { + return 6; +} + +arma::mat helper_sparse_matrix_csc_01_mat() { + arma::mat matrix; + matrix + << 10 << 0 << 0 << 0 << -2 << 0 << arma::endr + << 3 << 9 << 0 << 0 << 0 << 3 << arma::endr + << 0 << 7 << 8 << 7 << 0 << 0 << arma::endr + << 3 << 0 << 8 << 7 << 5 << 0 << arma::endr + << 0 << 8 << 0 << 9 << 9 << 13 << arma::endr + << 0 << 4 << 0 << 0 << 2 << -1 << arma::endr; + return matrix; +} + +arma::sp_mat helper_sparse_matrix_csc_01_sp_mat() { + arma::sp_mat matrix(helper_sparse_matrix_csc_01_mat()); + return matrix; +} + +arma::vec helper_sparse_matrix_csc_01_learning_target() { + arma::vec vector; + // If there is a 8 in the third column + vector + << 1 << 1 << -1 << -1 << 1 << 1; + return vector; +} + diff --git a/tests/cpp/svm/svm_helpers.h b/tests/cpp/svm/svm_helpers.h new file mode 100644 index 00000000..19016e9a --- /dev/null +++ b/tests/cpp/svm/svm_helpers.h @@ -0,0 +1,37 @@ +#ifndef SVM_HELPERS_H +#define SVM_HELPERS_H + +#include + +// Useful 
helpers for multiple test cases + +arma::mat helper_learning_data_01(); + +arma::vec helper_learning_target_01(); + +arma::vec helper_learning_target_02(); + +arma::mat helper_testing_data_01(); + +arma::vec helper_testing_target_01(); + +arma::vec helper_testing_target_02(); + +arma::uvec helper_sparse_matrix_csc_01_row_indices(); + +arma::uvec helper_sparse_matrix_csc_01_column_pointers(); + +arma::vec helper_sparse_matrix_csc_01_values(); + +size_t helper_sparse_matrix_csc_01_nrow(); + +size_t helper_sparse_matrix_csc_01_ncol(); + +arma::mat helper_sparse_matrix_csc_01_mat(); + +arma::sp_mat helper_sparse_matrix_csc_01_sp_mat(); + +arma::vec helper_sparse_matrix_csc_01_learning_target(); + +#endif + diff --git a/tests/cpp/svm/svm_utils_test.cpp b/tests/cpp/svm/svm_utils_test.cpp new file mode 100644 index 00000000..05baf768 --- /dev/null +++ b/tests/cpp/svm/svm_utils_test.cpp @@ -0,0 +1,10 @@ +#include "gtest/gtest.h" +#include +#include +#include + +TEST(SvmUtilsTests, LibSvmToArma) { + int nodes; + svm_node** node; + +} diff --git a/tests/cpp/svm/svmlight_test.cpp b/tests/cpp/svm/svmlight_test.cpp new file mode 100644 index 00000000..4632f8ee --- /dev/null +++ b/tests/cpp/svm/svmlight_test.cpp @@ -0,0 +1,317 @@ +#include +#include +#include +#include "gtest/gtest.h" + +#include "svm_helpers.h" +#include "svmlight_runner.h" +#include "svm_basic.h" +#include "svm_client.h" +#include "utils/logger.h" + +namespace { + +int log_level = LogLevel::DEBUG_LEVEL; + +double *null_double_ptr = 0; +long *null_long_ptr = 0; +long null_long = 0; + +// The fixture for testing class SVMLightRunner +class SVMLightRunnerTest: public ::testing::Test { + +protected: + + SVMLightRunnerTest() { + std::cout << "Creating SVMLightRunner..." << std::endl << std::flush; + svmlr = SVMLightRunner(); + std::cout << "Creating second SVMLightRunner..." << std::endl << std::flush; + second_svmlr = SVMLightRunner(); + std::cout << "Creating SVMConfiguration..." << std::endl << std::flush; + svm_config = SVMConfiguration(); + std::cout << "Setting logger..." << std::endl << std::flush; + svm_config.log.verbosity = log_level; + std::cout << "Creating second SVMConfiguration..." << std::endl << std::flush; + second_svm_config = SVMConfiguration(); + std::cout << "Setting logger..." << std::endl << std::flush; + second_svm_config.log.verbosity = log_level; + + learning_data_01 = helper_learning_data_01(); + learning_target_01 = helper_learning_target_01(); + learning_target_02 = helper_learning_target_02(); + + testing_data_01 = helper_testing_data_01(); + testing_target_01 = helper_testing_target_01(); + testing_target_02 = helper_testing_target_02(); + + sparse_matrix_csc_01_row_indices = + helper_sparse_matrix_csc_01_row_indices(); + sparse_matrix_csc_01_column_pointers = + helper_sparse_matrix_csc_01_column_pointers(); + sparse_matrix_csc_01_values = + helper_sparse_matrix_csc_01_values(); + sparse_matrix_csc_01_nrow = + helper_sparse_matrix_csc_01_nrow(); + sparse_matrix_csc_01_ncol = + helper_sparse_matrix_csc_01_ncol(); + sparse_matrix_csc_01_sp_mat = + helper_sparse_matrix_csc_01_sp_mat(); + sparse_matrix_csc_01_learning_target = + helper_sparse_matrix_csc_01_learning_target(); + + std::cout << "Starting test..." 
<< std::endl << std::flush; + } + + virtual ~SVMLightRunnerTest() {} + + /// Called immediately after the constructor (righ before each test) + virtual void SetUp() {} + + /// Called immediately after each test (right before the destructor) + virtual void TearDown() {} + + /* Objects declared here can be used by all tests in the test case */ + + SVMLightRunner svmlr; + SVMLightRunner second_svmlr; + SVMConfiguration svm_config; + SVMConfiguration second_svm_config; + + arma::mat learning_data_01; + arma::vec learning_target_01; + arma::vec learning_target_02; + arma::mat testing_data_01; + arma::vec testing_target_01; + arma::vec testing_target_02; + + arma::uvec sparse_matrix_csc_01_row_indices; + arma::uvec sparse_matrix_csc_01_column_pointers; + arma::vec sparse_matrix_csc_01_values; + size_t sparse_matrix_csc_01_nrow; + size_t sparse_matrix_csc_01_ncol; + arma::sp_mat sparse_matrix_csc_01_sp_mat; + arma::vec sparse_matrix_csc_01_learning_target; +}; + + +} // end namespace + +/* Fixture tests */ + +TEST_F(SVMLightRunnerTest, processRequest_learning) { + std::cout << "SVMConfiguration data..." << std::endl; + svm_config.data = learning_data_01; + std::cout << "SVMConfiguration target..." << std::endl; + svm_config.target = learning_target_01; + std::cout << "SVMConfiguration setPrediction..." << std::endl; + svm_config.setPrediction(false); + std::cout << "Processing request." << std::endl; + svmlr.processRequest(svm_config); + + // kernel_type - LINEAR + ASSERT_EQ(svm_config.kernel_type, 0); + // -d int -> parameter d in polynomial kernel + ASSERT_EQ(svm_config.degree, 3); + // -g float -> parameter gamma in rbf kernel + ASSERT_EQ(svm_config.gamma, 1); + // -s float -> parameter s in sigmoid/poly kernel + ASSERT_EQ(svm_config.coef0, 1); + // -r float -> parameter c in sigmoid/poly kernel + ASSERT_EQ(svm_config.C, 1); + // highest feature index - no assignment to read-only data + ASSERT_EQ(svm_config.data.n_cols, 4); + // number of support vectors + ASSERT_EQ(svm_config.l, 3); + // threshold b + ASSERT_DOUBLE_EQ(svm_config.b, 2.8710367416806548e-13); +} + +TEST_F(SVMLightRunnerTest, processRequest_classification) { + std::cout << "Testing learning..." << std::endl << std::flush; + svm_config.data = learning_data_01; + svm_config.target = learning_target_01; + svm_config.setPrediction(false); + svmlr.processRequest(svm_config); + + std::cout << "Testing prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + svmlr.processRequest(svm_config); + + for (int i = 0; i < 4; ++i) { + ASSERT_DOUBLE_EQ(svm_config.result[i], testing_target_01[i]); + } +} + +TEST_F(SVMLightRunnerTest, test_globals_cleaning) { + std::cout << "Testing whether globals are being initialized properly" + << std::endl << std::flush; + extern double *primal; + ASSERT_EQ(primal, null_double_ptr); + extern double *dual; + ASSERT_EQ(dual, null_double_ptr); + extern long precision_violations; + ASSERT_EQ(precision_violations, null_long); + extern double *buffer; + ASSERT_EQ(buffer, null_double_ptr); + extern long *nonoptimal; + ASSERT_EQ(nonoptimal, null_long_ptr); + extern long smallroundcount; + ASSERT_EQ(smallroundcount, null_long); + extern long roundnumber; + ASSERT_EQ(roundnumber, null_long); + extern long kernel_cache_statistic; + ASSERT_EQ(kernel_cache_statistic, null_long); +} + + +TEST_F(SVMLightRunnerTest, processRequest_with_poly_kernel) { + std::cout << "Testing learning..." 
<< std::endl << std::flush; + svm_config.setKernel(std::string("poly")); + svm_config.data = learning_data_01; + svm_config.target = learning_target_02; + svm_config.setPrediction(false); + svmlr.processRequest(svm_config); + + std::cout << "Testing prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + svmlr.processRequest(svm_config); + + for (int i = 0; i < 4; ++i) { + ASSERT_DOUBLE_EQ(svm_config.result[i], testing_target_02[i]); + } +} + +TEST_F(SVMLightRunnerTest, processRequest_with_rbf_kernel) { + std::cout << "Testing learning..." << std::endl << std::flush; + svm_config.setKernel(std::string("rbf")); + svm_config.data = learning_data_01; + svm_config.target = learning_target_02; + svm_config.setPrediction(false); + svmlr.processRequest(svm_config); + + std::cout << "Testing prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + svmlr.processRequest(svm_config); + + for (int i = 0; i < 4; ++i) { + ASSERT_DOUBLE_EQ(svm_config.result[i], testing_target_02[i]); + } +} + +TEST_F(SVMLightRunnerTest, processRequest_with_sigmoid_kernel) { + std::cout << "Testing learning..." << std::endl << std::flush; + svm_config.setKernel(std::string("sigmoid")); + svm_config.data = learning_data_01; + svm_config.target = learning_target_02; + svm_config.setPrediction(false); + /* C calulated by SVMLight: [avg. x*x]^-1 */ + svm_config.C = 2.8411; + svmlr.processRequest(svm_config); + + std::cout << "Testing prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + svmlr.processRequest(svm_config); + + for (int i = 0; i < 4; ++i) { + ASSERT_DOUBLE_EQ(svm_config.result[i], testing_target_02[i]); + } +} + +TEST_F(SVMLightRunnerTest, integration_svmclient_predict) { + std::cout << "Testing learning..." << std::endl << std::flush; + svm_config.data = learning_data_01; + svm_config.target = learning_target_01; + svm_config.setPrediction(false); + // We must do it that way because we are testing SVMLightRunner here :) + svmlr.processRequest(svm_config); + + std::cout << "Testing SVMClient prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + SVMClient *svm_client = new SVMClient(&svm_config); + svm_client->predict(testing_data_01); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < testing_target_01.n_rows; ++i) { + ASSERT_DOUBLE_EQ( + client_config.result[i], testing_target_01[i]); + } +} + +TEST_F(SVMLightRunnerTest, integration_svmclient_sparse_predict) { + std::cout << "Testing learning..." << std::endl << std::flush; + // Sparse matrix is currently being held as transposed + svm_config.sparse_data = sparse_matrix_csc_01_sp_mat.t(); + svm_config.target = sparse_matrix_csc_01_learning_target; + svm_config.setPrediction(false); + svm_config.setSparse(true); + svmlr.processRequest(svm_config); + + std::cout << "Testing SVMClient sparse prediction..." 
<< std::endl << std::flush; + svm_config.setPrediction(true); + + SVMClient *svm_client = new SVMClient(&svm_config); + ASSERT_EQ(svm_client->isSparse(), true); + + svm_client->sparse_predict( + sparse_matrix_csc_01_column_pointers, + sparse_matrix_csc_01_row_indices, + sparse_matrix_csc_01_values, + sparse_matrix_csc_01_nrow, + sparse_matrix_csc_01_ncol + ); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < sparse_matrix_csc_01_learning_target.n_rows; ++i) { + ASSERT_DOUBLE_EQ( + client_config.result[i], sparse_matrix_csc_01_learning_target[i]); + } +} + +TEST_F(SVMLightRunnerTest, svm_options_01) { + std::cout << "Learning..." << std::endl << std::flush; + svm_config.data = learning_data_01; + svm_config.target = learning_target_01; + svm_config.setPrediction(false); + svm_config.svm_options = "-c 0.25"; + svmlr.processRequest(svm_config); + + std::cout << "Testing SVMClient prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + SVMClient *svm_client = new SVMClient(&svm_config); + svm_client->predict(testing_data_01); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < testing_target_01.n_rows; ++i) { + // NOTE: `svm_predict` after `svm_learn -c 0.25` predicts 1 + // (incorrectly) for all of this data + ASSERT_DOUBLE_EQ(client_config.result[i], 1); + } +} + +TEST_F(SVMLightRunnerTest, svm_options_02) { + std::cout << "Learning..." << std::endl << std::flush; + svm_config.data = learning_data_01; + svm_config.target = learning_target_01; + svm_config.setPrediction(false); + svm_config.svm_options = "-c 1"; + svmlr.processRequest(svm_config); + + std::cout << "Testing SVMClient prediction..." << std::endl << std::flush; + svm_config.data = testing_data_01; + svm_config.setPrediction(true); + SVMClient *svm_client = new SVMClient(&svm_config); + svm_client->predict(testing_data_01); + SVMConfiguration client_config = svm_client->getConfiguration(); + + for (int i = 0; i < testing_target_01.n_rows; ++i) { + // NOTE: `svm_predict` after `svm_learn -c 1` predicts with good + // accuracy for this data + ASSERT_DOUBLE_EQ(client_config.result[i], testing_target_01[i]); + } +} diff --git a/tests/cpp/utils/utils_test.cpp b/tests/cpp/utils/utils_test.cpp new file mode 100644 index 00000000..b52049d0 --- /dev/null +++ b/tests/cpp/utils/utils_test.cpp @@ -0,0 +1,85 @@ +#include <string> // for std::string (restored; the original header name was lost) +#include <cstddef> // for NULL (restored; assumed) +#include "gtest/gtest.h" + +#include "utils/utils.h" + +namespace { + +/* Fixture */ +class UtilsTest: public ::testing::Test { + +protected: + + UtilsTest() {} + virtual ~UtilsTest() {} + + /// Called immediately after the constructor (right before each test) + virtual void SetUp() {} + + /// Called immediately after each test (right before the destructor) + virtual void TearDown() {} + + /* Objects declared here can be used by all tests in the test case */ + std::string sample_args[5] { + std::string(""), + std::string("-a"), + std::string("-d 4.51"), + std::string("filename1 filename2 -a 1 -b 0 -c"), + std::string("filename1 filename2 -a \t 1 -b 0\t\t-c") + }; + + int sample_args_count[5] { + 0, + 1, + 2, + 7, + 7 + }; + + char sample_args_argv[5][7][10] = { + {}, + {"-a"}, + {"-d", "4.51"}, + {"filename1", "filename2", "-a", "1", "-b", "0", "-c"}, + {"filename1", "filename2", "-a", "1", "-b", "0", "-c"} + }; + +}; + +} // end namespace + +/* Fixture tests */ + +TEST_F(UtilsTest, check_argc) { + int sample_num = sizeof(sample_args)/sizeof(*sample_args); + for (int i = 0; i < 
sample_num; ++i) { + ASSERT_EQ(check_argc(sample_args[i].c_str()), sample_args_count[i]); + ASSERT_EQ(check_argc(sample_args[i]), sample_args_count[i]); + } +} + +TEST_F(UtilsTest, to_argv) { + int sample_num = sizeof(sample_args)/sizeof(*sample_args); + for (int i = 0; i < sample_num; ++i) { + int argc = check_argc(sample_args[i].c_str()); + char** argv = to_argv(sample_args[i].c_str()); + + for(int j = 0; j < argc; ++j) { + ASSERT_EQ( + std::string(argv[j]), + std::string(sample_args_argv[i][j]) + ); + } + } +} + +TEST_F(UtilsTest, free_argv) { + int sample_num = sizeof(sample_args)/sizeof(*sample_args); + for (int i = 0; i < sample_num; ++i) { + int argc = check_argc(sample_args[i].c_str()); + char** argv = to_argv(sample_args[i].c_str()); + argv = free_argv(argc, argv); + ASSERT_TRUE(argv == NULL); + } +} diff --git a/tests/not-working/test_comparisive_svm_sparse.R b/tests/not-working/test_comparisive_svm_sparse.R new file mode 100644 index 00000000..dce61a39 --- /dev/null +++ b/tests/not-working/test_comparisive_svm_sparse.R @@ -0,0 +1,61 @@ +library(gmum.r) + +verbose <- TRUE # set true for local testing + +x_file <- system.file("inst", "data_sets", "svm", "mushrooms.x", mustWork=FALSE, package="gmum.r") +y_file <- system.file("inst", "data_sets", "svm", "mushrooms.y", mustWork=FALSE, package="gmum.r") + +if (!file.exists(x_file) || !file.exists(y_file)) { + stop("Use script 'download_mushrooms.sh' to get the dataset") +} + +x <- read.matrix.csr(x_file) +y <- read.table(y_file) +y <- as.factor(unlist(y)) + + +libs <- c("svmlight", "libsvm") +kernels <- c("linear", "poly", "rbf") + +for (lib_i in libs) { + for (kernel_i in kernels) { + + train_start <- Sys.time() + svm <- SVM(x=x, y=y, core=lib_i, kernel=kernel_i, C=1, gamma=1, verbosity=0) + gmum_train_time <- Sys.time() - train_start + + test_time <- Sys.time() + gmum_pred <- predict(svm, x) + gmum_test_time <- Sys.time() - test_time + + gmum_acc <- svm.accuracy(prediction=gmum_pred, target=y) + + if (kernel_i == "rbf") kernel_i = "radial" + train_time <- Sys.time() + e_svm <- e1071::svm(x=x, y=y, type='C-classification', kernel=kernel_i, cost=1, gamma=1, scale=FALSE, fitted=FALSE) + e_train_time <- Sys.time() - train_time + + test_time <- Sys.time() + e_pred <- as.integer(predict(e_svm, x)) + e_test_time <- Sys.time() - test_time + + e_acc <- svm.accuracy(e_pred, y) + + if (verbose) { + print(sprintf("gmum.r %s %s acc: %.3f", lib_i, kernel_i, gmum_acc)) + print(sprintf("e1071 %s acc: %.3f", kernel_i, e_acc)) + print("---") + print(sprintf("gmum.r %s %s train time: %.2f", lib_i, kernel_i, gmum_train_time)) + print(sprintf("e1071 %s train time: %.2f", kernel_i, e_train_time)) + print("---") + print(sprintf("gmum.r %s %s test time: %.2f", lib_i, kernel_i, gmum_test_time)) + print(sprintf("e1071 %s test time: %.2f", kernel_i, e_test_time)) + print("---") + print(sprintf("gmum.r %s %s nSV: %i", lib_i, kernel_i, svm$numberSV)) + print(sprintf("e1071 %s nSV: %i", kernel_i, nrow(e_svm$SV))) + print("---") + print("======================================") + } + + } +} diff --git a/tests/not-working/test_svm_news20.R b/tests/not-working/test_svm_news20.R new file mode 100644 index 00000000..37854c50 --- /dev/null +++ b/tests/not-working/test_svm_news20.R @@ -0,0 +1,55 @@ +library(gmum.r) +library(caret) + +data_file <- system.file("data_sets", "svm", "large", "news20.RData", 
mustWork=TRUE, package="gmum.r") + +if (!file.exists(data_file)) { + stop("Missing dataset file, get it here https://www.dropbox.com/s/wm1qtwewss4ab08/news20.RData?dl=0") +} + +load(data_file) + +x <- news20$x +y <- news20$y + +library(caret) +set.seed(666) +train <- as.integer(createDataPartition(y, p=0.5, list=FALSE)) + +core <- "svmlight" +kernel <- "linear" + +library(e1071) + +train_start <- proc.time() +gmum_svm <- SVM(x[train],y[train], core=core, kernel=kernel, C=1, gamma=1) +gmum_train <- as.numeric((proc.time() - train_start)[3]) + +if (kernel=="rbf") kernel <- "radial" +train_start <- proc.time() +e_svm <- e1071::svm(x=x[train], y=y[train], type='C-classification', kernel=kernel, cost=1, gamma=1, scale=FALSE, fitted=FALSE) +e_train <- as.numeric((proc.time() - train_start)[3]) + + +test_start <- proc.time() +gmum_pred <- predict(gmum_svm, x[-train]) +gmum_test <- as.numeric((proc.time() - test_start)[3]) + +test_start <- proc.time() +e_pred <- predict(e_svm, x[-train]) +e_test <- as.numeric((proc.time() - test_start)[3]) + +gmum_acc <- svm.accuracy(y[-train], gmum_pred) +e_acc <- svm.accuracy(y[-train], e_pred) + +print(sprintf("gmum core: %s", core)) +print(sprintf("kernel: %s", kernel)) + +print(sprintf("gmum acc: %.4f", gmum_acc)) +print(sprintf("e1071 acc: %.4f", e_acc)) + +print(sprintf("gmum train time: %.4f", gmum_train)) +print(sprintf("e1071 train time: %.4f", e_train)) + +print(sprintf("gmum test time: %.4f", gmum_test)) +print(sprintf("e1071 test time: %.4f", e_test)) diff --git a/tests/not-working/test_svm_sparse_news20_part.R b/tests/not-working/test_svm_sparse_news20_part.R new file mode 100644 index 00000000..c2acf8a9 --- /dev/null +++ b/tests/not-working/test_svm_sparse_news20_part.R @@ -0,0 +1,65 @@ +library(gmum.r) +library(caret) +library(testthat) + +test_that("svmlight is faster than e1071 on big sparse data", { + data_file <- system.file("data_sets", "svm", "large", "news20_part.RData", mustWork=FALSE, package="gmum.r") + if (!file.exists(data_file)) { + warning("Missing dataset file, get it here http://gmum.net/files/gmum.r/data/news20_part.RData") + } else { + load(data_file) + + x <- news20.part$x + y <- news20.part$y + + library(caret) + set.seed(666) + train <- as.integer(createDataPartition(y, p=0.2, list=FALSE)) + + core <- "svmlight" + kernel <- "linear" + + library(e1071) + + print("training on news20...") + + train_start <- proc.time() + gmum_svm <- SVM(x[train],y[train], core=core, kernel=kernel, C=1, gamma=1) + gmum_train <- as.numeric((proc.time() - train_start)[3]) + + if (kernel=="rbf") kernel <- "radial" + train_start <- proc.time() + e_svm <- e1071::svm(x=x[train], y=y[train], type='C-classification', kernel=kernel, cost=1, gamma=1, scale=FALSE, fitted=FALSE) + e_train <- as.numeric((proc.time() - train_start)[3]) + + print("testing on news20...") + + test_start <- proc.time() + gmum_pred <- predict(gmum_svm, x[-train]) + gmum_test <- as.numeric((proc.time() - test_start)[3]) + + test_start <- proc.time() + e_pred <- predict(e_svm, x[-train]) + e_test <- as.numeric((proc.time() - test_start)[3]) + + gmum_acc <- svm.accuracy(y[-train], gmum_pred) + e_acc <- svm.accuracy(y[-train], e_pred) + + print(sprintf("gmum core: %s", core)) + print(sprintf("kernel: %s", kernel)) + + print(sprintf("gmum acc: %.4f", gmum_acc)) + print(sprintf("e1071 acc: %.4f", e_acc)) + + print(sprintf("gmum train time: %.4f", gmum_train)) + print(sprintf("e1071 train time: %.4f", e_train)) + + print(sprintf("gmum test time: %.4f", gmum_test)) + print(sprintf("e1071 test 
time: %.4f", e_test)) + + expect_true(abs(gmum_acc - e_acc) < 1e-2) + expect_true(gmum_train < e_train) + expect_true(gmum_test < e_test) + } +}) + diff --git a/tests/test-all.R b/tests/test-all.R new file mode 100644 index 00000000..46f33941 --- /dev/null +++ b/tests/test-all.R @@ -0,0 +1,31 @@ +library(testthat) + +is.installed <- function(mypkg){ + is.element(mypkg, installed.packages()[,1]) +} + +if(!is.installed("klaR")){ + install.packages("klaR",repos='http://cran.us.r-project.org') +} + +if(!is.installed("mlbench")){ + install.packages("mlbench",repos='http://cran.us.r-project.org') +} + +if(!is.installed("rattle")){ + install.packages("rattle",repos='http://cran.us.r-project.org') +} + +if(!is.installed("caret")){ + install.packages("caret",repos='http://cran.us.r-project.org') +} + +if(!is.installed("e1071")){ + install.packages("e1071",repos='http://cran.us.r-project.org') +} + +if(!is.installed("MASS")){ + install.packages("MASS",repos='http://cran.us.r-project.org') +} + +test_check('gmum.r') diff --git a/tests/testthat/README.md b/tests/testthat/README.md new file mode 100644 index 00000000..e4183597 --- /dev/null +++ b/tests/testthat/README.md @@ -0,0 +1,29 @@ +Testthat Tests +============== + +This directory contains R code tests. + +## Dependencies + +* caret package +* e1071 package +* klaR package (and svmlight binaries in your PATH, download from http://svmlight.joachims.org/. On Windows, +make sure you add the appropriate path to your PATH variable, or drop the binaries into the package root folder) +* mlbench package + +### Testthat + +In order to run these tests you need to install the **testthat** R package. + +``` +install.packages("testthat") +``` + +## Run tests + +To run all tests, use the devtools command: + +``` +devtools::test() +``` + diff --git a/tests/testthat/cec_function.R b/tests/testthat/cec_function.R new file mode 100644 index 00000000..75e40a75 --- /dev/null +++ b/tests/testthat/cec_function.R @@ -0,0 +1,3 @@ +f_standard <- function(m, sigma){ + return (m * log(2 * pi * exp(1)) / 2 + log(det(sigma)) / 2) +} diff --git a/tests/testthat/combinations.R b/tests/testthat/combinations.R new file mode 100644 index 00000000..5b078c92 --- /dev/null +++ b/tests/testthat/combinations.R @@ -0,0 +1,120 @@ +# $Id: combinations.R 1083 2007-03-23 22:53:00Z warnes $ +# + +## +## From email by Brian D Ripley to r-help +## dated Tue, 14 Dec 1999 11:14:04 +0000 (GMT) in response to +## Alex Ahgarin . Original version was +## named "subsets" and was written by Bill Venables. 
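+## +## Usage sketch (an illustrative note, not part of the original sources): +## combinations(4, 2) returns all choose(4, 2) = 6 unordered pairs of 1:4, +## one pair per row; with repeats.allowed=TRUE it enumerates multisets instead.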
+## + +combinations <- function(n, r, v = 1:n, set = TRUE, repeats.allowed=FALSE) +{ + if(mode(n) != "numeric" || length(n) != 1 + || n < 1 || (n %% 1) != 0) stop("bad value of n") + if(mode(r) != "numeric" || length(r) != 1 + || r < 1 || (r %% 1) != 0) stop("bad value of r") + if(!is.atomic(v) || length(v) < n) + stop("v is either non-atomic or too short") + if( (r > n) & repeats.allowed==FALSE) + stop("r > n and repeats.allowed=FALSE") + if(set) { + v <- unique(sort(v)) + if (length(v) < n) stop("too few different elements") + } + v0 <- vector(mode(v), 0) + ## Inner workhorse + if(repeats.allowed) + sub <- function(n, r, v) + { + if(r == 0) v0 else + if(r == 1) matrix(v, n, 1) else + if(n == 1) matrix(v, 1, r) else + rbind( cbind(v[1], Recall(n, r-1, v)), + Recall(n-1, r, v[-1])) + } + else + sub <- function(n, r, v) + { + if(r == 0) v0 else + if(r == 1) matrix(v, n, 1) else + if(r == n) matrix(v, 1, n) else + rbind(cbind(v[1], Recall(n-1, r-1, v[-1])), + Recall(n-1, r, v[-1])) + } + sub(n, r, v[1:n]) +} + +## +## Original version by Bill Venables and cited by Matthew +## Wiener (mcw@ln.nimh.nih.gov) in an email to R-help dated +## Tue, 14 Dec 1999 09:11:32 -0500 (EST) in response to +## Alex Ahgarin +## +## + + +gtools_permutations <- function(n, r, v = 1:n, set = TRUE, repeats.allowed=FALSE) +{ + if(mode(n) != "numeric" || length(n) != 1 + || n < 1 || (n %% 1) != 0) stop("bad value of n") + if(mode(r) != "numeric" || length(r) != 1 + || r < 1 || (r %% 1) != 0) stop("bad value of r") + if(!is.atomic(v) || length(v) < n) + stop("v is either non-atomic or too short") + if( (r > n) & repeats.allowed==FALSE) + stop("r > n and repeats.allowed=FALSE") + if(set) { + v <- unique(sort(v)) + if (length(v) < n) stop("too few different elements") + } + v0 <- vector(mode(v), 0) + ## Inner workhorse + if(repeats.allowed) + sub <- function(n, r, v) + { + if(r==1) matrix(v,n,1) else + if(n==1) matrix(v,1,r) else + { + inner <- Recall(n, r-1, v) + cbind( rep( v, rep(nrow(inner),n) ), + matrix( t(inner), ncol=ncol(inner), nrow=nrow(inner) * n , + byrow=TRUE ) + ) + } + } + else + sub <- function(n, r, v) + { + if(r==1) matrix(v,n,1) else + if(n==1) matrix(v,1,r) else + { + X <- NULL + for(i in 1:n) + X <- rbind( X, cbind( v[i], Recall(n-1, r - 1, v[-i]))) + X + } + } + + sub(n, r, v[1:n]) +} + +correctness <- function(correct_assignment, my_assignment, npoints, nclusters) +{ + perms <- gtools_permutations(v=0:(nclusters-1), n=nclusters, r=nclusters, repeats.allowed=F) + nperms <- dim(perms)[1] + best_percentage <- 0 + + for(i in 1:nperms) + { + ncorrect <- 0 + for(j in 1:npoints) { + if(correct_assignment[j] == perms[i, my_assignment[j] + 1]) { + ncorrect <- ncorrect + 1 + } + } + correct_percentage <- ncorrect / npoints + best_percentage <- max(correct_percentage, best_percentage) + } + return(best_percentage) +} diff --git a/tests/testthat/download_mushrooms.sh b/tests/testthat/download_mushrooms.sh new file mode 100755 index 00000000..db4c28e4 --- /dev/null +++ b/tests/testthat/download_mushrooms.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +cd ../../inst/dev/datasets/svm +wget http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/mushrooms +cut -d " " -f 2- mushrooms > mushrooms.x +cut -d " " -f -1 mushrooms > mushrooms.y +rm mushrooms diff --git a/tests/testthat/download_svm_data.R b/tests/testthat/download_svm_data.R new file mode 100644 index 00000000..77a3f375 --- /dev/null +++ b/tests/testthat/download_svm_data.R @@ -0,0 +1,30 @@ +svm.data.root.local <- file.path("inst", "dev", "datasets", 
"svm") +colon_cancer.filename <- "colon-cancer" +colon_cancer.url <- "http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/colon-cancer.bz2" + +svm.download.colon_cancer <- function() { + svm.download.dataset(colon_cancer.url, colon_cancer.filename) +} + +#right now is working only wiz bzip2 +svm.download.dataset <- function(url, filename, root=svm.data.root.local) { + destfile <- file.path(root,filename) + if(!file.exists(destfile)) { + filename_download <- basename(url) + ext <- strsplit(filename_download, "\\.")[[1]][2] + ext <- switch(ext, + bz2 = "bzip2") + destfile_download <- file.path(root,filename_download) + download.file(url,destfile=destfile_download) + destfile_con <- file(destfile, "w") + destfile_download_con <- bzfile(destfile_download) + writeLines(readLines(destfile_download_con, warn=FALSE), destfile_con) + unlink(destfile_download) + close(destfile_con) + close(destfile_download_con) + } else { + print("Everything ok"); + } +} + +svm.download.colon_cancer() diff --git a/tests/testthat/svm_plot_test.R b/tests/testthat/svm_plot_test.R new file mode 100644 index 00000000..6e6f5cb1 --- /dev/null +++ b/tests/testthat/svm_plot_test.R @@ -0,0 +1,58 @@ +library(gmum.r) + +# TO USE: ctrl + a -> ctrl + enter + +data(svm_two_ellipsoids_dataset) +data(svm_two_circles_dataset) + +formula <- V3 ~ . +########## two elipsoids plots ######### +ds_2e <- svm.twoellipsoids.dataset + +svm_2e <- SVM(formula, ds_2e, core="libsvm", kernel="linear", prep = "2e", C=10); +svm <- SVM(formula, ds_2e, core="libsvm", kernel="linear", prep = "none", C=10); + +plot(svm, mode="normal") +plot(svm_2e, mode="normal") +plot(svm_2e, mode="contour") + +########## circles plots ######### +ds_circles <- svm.twocircles.dataset + +svm_2e <- SVM(formula, ds_circles, core="libsvm", kernel="linear", prep = "2e", C=10); +svm <- SVM(formula, ds_circles, core="libsvm", kernel="linear", prep = "none", C=10); + +plot(svm, mode="normal") +plot(svm_2e, mode="normal") +plot(svm_2e, mode="contour") + + +# multiclass plot + +set.seed(666) + +centers <- list(c(0,0), + c(10,0), + c(0,10), + c(3,3)) + +sigmas <- list(matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 1, 1), nrow=2)) + +sizes <- list(100, 100, 100, 101) + +n <- length(centers) +df <- c() +for(i in 1:n){ + df <- rbind(df, cbind(mvrnorm(n=sizes[[i]], mu=centers[[i]], Sigma=sigmas[[i]]), rep(i,sizes[[i]]))) +} +df <- data.frame(df) +colnames(df) <- c("x1","x2", "y") + +df[,3] <- as.factor(df[,3]) + +svm <- SVM(x=df[,1:2], y=df[,3], class.type="one.versus.all") + +plot(svm) diff --git a/tests/testthat/test_cec_centroid_assignment.R b/tests/testthat/test_cec_centroid_assignment.R new file mode 100644 index 00000000..76615320 --- /dev/null +++ b/tests/testthat/test_cec_centroid_assignment.R @@ -0,0 +1,37 @@ +library(testthat) +library('gmum.r') + +source('combinations.R') + +test_that("EllipseGauss centroid assignment is correct", { + + data(cec_ellipse_gauss) + + expected_energy <- cec.ellipsegauss.extra[["energy"]] + dataset_clusters <- cec.ellipsegauss.extra[["cluster"]] + dataset_points <- cec.ellipsegauss + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 20 + accepted <- 0 + nclusters <- 4 + npoints = dim(dataset_points)[1] + for(i in 1:t) + { + c1 <- CEC(k=nclusters, x=dataset_points, method.init='random', control.itmax=0, control.nstart=1, seed=13371337) + centers <- c1$centers + centers_len = length(centers) + c2 <- CEC(k=centers_len, x=dataset_points, 
method.init='centroids', params.centroids=centers) + c3 <- CEC(k=centers_len, x=dataset_points, method.init='centroids', params.centroids=centers) + expect_that(c2$energy, equals(c3$energy, tolerance = 0)) + actual_energy = c2$energy + correct_percentage <- correctness(dataset_clusters, c2$clustering, npoints, centers_len) + if(isTRUE(all.equal(current = actual_energy, target=expected_energy, tolerance=0.2)) | (correct_percentage >= 0.9) ) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + + print("test_centroid_assignment: EllipseGauss centroid assignment is correct") +}) diff --git a/tests/testthat/test_cec_control_eps_boundary_values.R b/tests/testthat/test_cec_control_eps_boundary_values.R new file mode 100644 index 00000000..bb846878 --- /dev/null +++ b/tests/testthat/test_cec_control_eps_boundary_values.R @@ -0,0 +1,68 @@ +library(testthat) +library('gmum.r') + +test_that("mouse_1_spherical kmeans test control eps boundary values", { + data(cec.mouse1.spherical) + dataset_points <- cec.mouse1.spherical + nclusters <- 3 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='kmeans++', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("mouse_1_spherical kmeans test control eps boundary values is correct") +}) + +test_that("mouse_1_spherical random test control eps boundary values", { + data(cec.mouse1.spherical) + dataset_points <- cec.mouse1.spherical + nclusters <- 3 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='random', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("mouse_1_spherical random test control eps boundary values is correct") +}) + +test_that("EllipseGauss kmeans test control eps boundary values", { + data(cec.ellipsegauss) + dataset_points <- cec.ellipsegauss + nclusters <- 4 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='kmeans++', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("EllipseGauss kmeans test control eps boundary values is correct") +}) + +test_that("EllipseGauss random test control eps boundary values", { + data(cec.ellipsegauss) + dataset_points <- cec.ellipsegauss + nclusters <- 4 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='random', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("EllipseGauss random test control eps boundary values is correct") +}) + +test_that("mouse_1 kmeans test control eps boundary values", { + data(cec.mouse1) + dataset_points <- cec.mouse1 + nclusters <- 3 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='kmeans++', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("mouse_1 kmeans test control eps boundary values is correct") +}) + +test_that("mouse_1 random test control eps boundary values", { + data(cec.mouse1) + dataset_points <- cec.mouse1 + 
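# note: control.eps close to 1 makes nearly every cluster eligible for removal, + # so this run is expected to collapse to a single cluster (asserted below) +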
nclusters <- 3 + npoints = dim(dataset_points)[1] + c <- CEC(k=nclusters, x=dataset_points, control.nstart=1, method.init='random', control.eps=((npoints - 1) / npoints), seed=13371337) + final_nclusters = tail(c$logNumberOfClusters, n=1) + expect_that(final_nclusters == 1, is_true()) + print("mouse_1 random test control eps boundary values is correct") +}) diff --git a/tests/testthat/test_cec_entropy_simple.R b/tests/testthat/test_cec_entropy_simple.R new file mode 100644 index 00000000..25628e99 --- /dev/null +++ b/tests/testthat/test_cec_entropy_simple.R @@ -0,0 +1,15 @@ +library(testthat) +library('gmum.r') + +test_that("Entropy is correct", { + data(cec_simple_1) + + expected_energy <- cec.simple1.extra[["energy"]] + dataset_points <- cec.simple1 + + c <- CEC(k=1, x=dataset_points, seed=13371337) + actual_energy <- c$energy + + expect_that(actual_energy, equals(expected_energy, tolerance = 1e-4)) + print("test_entropy_simple: Entropy is correct") +}) diff --git a/tests/testthat/test_cec_func_method_type.R b/tests/testthat/test_cec_func_method_type.R new file mode 100644 index 00000000..d58afef6 --- /dev/null +++ b/tests/testthat/test_cec_func_method_type.R @@ -0,0 +1,24 @@ +library(testthat) +library('gmum.r') + +source("cec_function.R") + +data(cec.simple1) + +expected_energy <- cec.simple1.extra[["energy"]] +dataset_points <- cec.simple1 + +test_that("Entropy is correct", { + c1 <- CEC(k=1, x=dataset_points, method.type='func', params.function=f_standard, seed=13371337) + c2 <- CEC(k=1, x=dataset_points, method.type='standard', seed=13371337) + + plot(c1) + plot(c2) + + actual_energy_func_type <- c1$energy + actual_energy_standard_type <- c2$energy + + expect_that(actual_energy_func_type, equals(actual_energy_standard_type, tolerance = 1e-4)) + expect_that(actual_energy_func_type, equals(expected_energy, tolerance = 1e-4)) + print("test_func_method_type: Entropy is correct") +}) diff --git a/tests/testthat/test_cec_kmeans_assignment.R b/tests/testthat/test_cec_kmeans_assignment.R new file mode 100644 index 00000000..0e511cdc --- /dev/null +++ b/tests/testthat/test_cec_kmeans_assignment.R @@ -0,0 +1,70 @@ +library(testthat) +library('gmum.r') +source('combinations.R') + +test_that("correctness works", { + data(cec_ellipse_gauss) + + dataset_clusters <- cec.ellipsegauss.extra[["cluster"]] + dataset_points <- cec.ellipsegauss + + nclusters <- 4 + npoints <- dim(dataset_points)[1] + + correct_percentage <- correctness(dataset_clusters, dataset_clusters, npoints, nclusters) + expect_that(correct_percentage, equals(1)) + print("test_kmeans_assignment: correctness works") +}) + +test_that("mouse_1_spherical kmeans coverage is correct", { + data(cec.mouse1.spherical) + + expected_energy <- cec.mouse1.spherical.extra[["energy"]] + dataset_clusters <- cec.mouse1.spherical.extra[["cluster"]] + dataset_points <- cec.mouse1.spherical + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 100 + accepted <- 0 + nclusters <- 3 + npoints = dim(dataset_points)[1] + for(i in 1:t) + { + c <- CEC(k=3, x=dataset_points, control.nstart=1, method.type='spherical', method.init='kmeans++', seed=13371337) + plot(c) + correct_percentage <- correctness(dataset_clusters, c$clustering, npoints, nclusters) + if(correct_percentage == 1) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + print("test_kmeans_assignment: mouse_1_spherical kmeans coverage is correct") +}) + +test_that("mouse_1_spherical kmeans energy is correct", { + data(cec.mouse1.spherical) + + 
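# note: the energy check is repeated t times and the test only passes when a + # majority of runs reproduce the reference energy (a guard against occasional + # bad local minima) +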
expected_energy <- cec.mouse1.spherical.extra[["energy"]] + dataset_clusters <- cec.mouse1.spherical.extra[["cluster"]] + dataset_points <- cec.mouse1.spherical + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 100 + accepted <- 0 + nclusters <- 3 + npoints = dim(dataset_points)[1] + for(i in 1:t) + { + c <- CEC(k=3, x=dataset_points, control.nstart=1, method.type='spherical', method.init='kmeans++', seed=13371337) + plot(c) + actual_energy <- c$energy + + if(isTRUE(all.equal(current = actual_energy, target=expected_energy, tolerance=.0001))) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + print("test_kmeans_assignment: mouse_1_spherical kmeans energy is correct") +}) diff --git a/tests/testthat/test_cec_random_assignment.R b/tests/testthat/test_cec_random_assignment.R new file mode 100644 index 00000000..2b3cded6 --- /dev/null +++ b/tests/testthat/test_cec_random_assignment.R @@ -0,0 +1,96 @@ +library(testthat) +library('gmum.r') + +source('combinations.R') + +test_that("correctness works", { + data(cec_ellipse_gauss) + + dataset_clusters <- cec.ellipsegauss.extra[["cluster"]] + dataset_points <- cec.ellipsegauss + + nclusters <- 4 + npoints <- dim(dataset_points)[1] + + correct_percentage <- correctness(dataset_clusters, dataset_clusters, npoints, nclusters) + expect_that(correct_percentage, equals(1)) + print("test_random_assignment: correctness works") +}) + +test_that("EllipseGauss random assignment is correct", { + data(cec_ellipse_gauss) + + expected_energy <- cec.ellipsegauss.extra[["energy"]] + dataset_clusters <- cec.ellipsegauss.extra[["cluster"]] + dataset_points <- cec.ellipsegauss + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 20 + accepted <- 0 + nclusters <- 4 + npoints = dim(dataset_points)[1] + for(i in 1:t) + { + c <- CEC(k=nclusters, x=dataset_points, method.init='random', seed=13371337) + plot(c) + correct_percentage <- correctness(dataset_clusters, c$clustering, npoints, nclusters) + if(isTRUE(all.equal(current = c$energy, target=expected_energy, tolerance=0.25)) | (correct_percentage >= 0.9) ) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + print("test_random_assignment: EllipseGauss random assignment is correct") +}) + +test_that("mouse_1 random assignment is correct", { + data(cec.mouse1) + + expected_energy <- cec.mouse1.extra[["energy"]] + dataset_clusters <- cec.mouse1.extra[["cluster"]] + dataset_points <- cec.mouse1 + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 20 + accepted <- 0 + nclusters <- 3 + npoints <- dim(dataset_points)[1] + for(i in 1:t) + { + c <-CEC(k=nclusters, x=dataset_points, method.init='random', seed=13371337) + plot(c) + correct_percentage <- correctness(dataset_clusters, c$clustering, npoints, nclusters) + if(isTRUE(all.equal(current = c$energy, target=expected_energy, tolerance=0.2)) | (correct_percentage >= 0.9) ) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + print("test_random_assignment: mouse_1 random assignment is correct") +}) + +test_that("mouse_1_spherical random assignment is correct", { + data(cec.mouse1.spherical) + + expected_energy <- cec.mouse1.spherical.extra[["energy"]] + dataset_clusters <- cec.mouse1.spherical.extra[["cluster"]] + dataset_points <- cec.mouse1.spherical + + dataset_clusters <- dataset_clusters - min(dataset_clusters) + + t <- 20 + accepted <- 0 + nclusters <- 3 + npoints <- dim(dataset_points)[1] + for(i in 1:t) + { + c <- 
CEC(k=nclusters, x=dataset_points, method.type='spherical', method.init='random', seed=13371337) + plot(c) + correct_percentage <- correctness(dataset_clusters, c$clustering, npoints, nclusters) + if(isTRUE(all.equal(current = c$energy, target=expected_energy, tolerance=0.2)) | (correct_percentage >= 0.9) ) { + accepted <- accepted + 1 + } + } + expect_that(accepted > t/2.0, is_true()) + print("test_random_assignment: mouse_1_spherical random assignment is correct") +}) diff --git a/tests/testthat/test_cec_seed.R b/tests/testthat/test_cec_seed.R new file mode 100644 index 00000000..c84fc1e9 --- /dev/null +++ b/tests/testthat/test_cec_seed.R @@ -0,0 +1,21 @@ +library(testthat) +library('gmum.r') + +source('combinations.R') + +test_that("Seed works", { + data(cec_ellipse_gauss) + dataset_points <- cec.ellipsegauss + + c1 <- CEC(k=4, x=dataset_points, method.init='random', seed=1) + c2 <- CEC(k=4, x=dataset_points, method.init='random', seed=2) + expect_that(all(c1$clustering == c2$clustering), is_false()) + expect_that(c1$energy == c2$energy, is_false()) + + c1 <- CEC(k=4, x=dataset_points, method.init='random', seed=1) + c2 <- CEC(k=4, x=dataset_points, method.init='random', seed=1) + expect_that(all(c1$clustering == c2$clustering), is_true()) + expect_that(c1$energy == c2$energy, is_true()) + + print("test_cec_seed: seed works") +}) diff --git a/tests/testthat/test_cec_t_set.R b/tests/testthat/test_cec_t_set.R new file mode 100644 index 00000000..67e10585 --- /dev/null +++ b/tests/testthat/test_cec_t_set.R @@ -0,0 +1,58 @@ +library(testthat) +library('gmum.r') + +test_that("spherical works", { + data(cec.tset) + c <- CEC(x = Tset, k = 10, method.type = 'spherical', control.nstart = 100, control.eps = 0.07, seed=13371337) + plot(c, ellipses = TRUE, centers = TRUE) + + expected_energy = -0.9294208 + + expect_that(all.equal(current = c$energy, target = expected_energy, tolerance = 0.2), is_true()) + print("test_t_set: spherical works") +}) + +test_that("fixed_spherical works", { + data(cec.tset) + c <- CEC(x = Tset, k = 10, method.type = 'fixed_spherical', params.r = 0.01, control.nstart = 100, control.eps = 0.07, seed=13371337) + plot(c, ellipses = TRUE, centers = TRUE) + + expected_energy = -0.6100412 + + expect_that(all.equal(current = c$energy, target = expected_energy, tolerance = 0.2), is_true()) + print("test_t_set: fixed_spherical works") +}) + +test_that("diagonal works", { + data(cec.tset) + c <- CEC(x = Tset, k = 10, method.type = 'diagonal', control.nstart = 100, control.eps = 0.1, seed=13371337) + plot(c, ellipses = TRUE, centers = TRUE) + + expected_energy = -0.9852 + + expect_that(all.equal(current = c$energy, target = expected_energy, tolerance = 0.2), is_true()) + print("test_t_set: diagonal works") +}) + +test_that("fixed_covariance works", { + data(cec.tset) + expected_energy = -0.2362521 + c <- CEC(x = Tset, k = 10, method.type = 'fixed_covariance', params.cov=matrix(c(0.04, 0, 0, 0.01), 2), control.nstart = 100, control.eps = 0.07, seed=2) + plot(c, ellipses = TRUE, centers = TRUE) + expect_that(all.equal(current = c$energy, target = expected_energy, tolerance = 0.2), is_true()) + print("test_t_set: fixed_covariance works") +}) + + +test_that("mix works", { + data(cec.tset) + fixed_spherical_cluster = list(k = 3, method.type = 'fixed_spherical', params.r = 0.001) + fixed_covariance_cluster = list(k = 1, method.type = 'fixed_covariance', params.cov=matrix(c(0.05, 0, 0, 0.001), 2)) + c <- CEC(x = Tset, params.mix = list(fixed_covariance_cluster, fixed_spherical_cluster), 
control.nstart = 100, control.eps=0.09, seed=13371337) + plot(c, ellipses = TRUE, centers = TRUE) + + expected_energy = 2.459158 + + expect_that(all.equal(current = c$energy, target = expected_energy, tolerance = 0.2), is_true()) + print("test_t_set: mix works") +}) diff --git a/tests/testthat/test_gng_basic.R b/tests/testthat/test_gng_basic.R new file mode 100644 index 00000000..82970864 --- /dev/null +++ b/tests/testthat/test_gng_basic.R @@ -0,0 +1,181 @@ +library(testthat) + +test_that("Basic saving/loading works", { + print("Basic saving/loading works") + g <- GNG(train.online=TRUE, dim=3, verbosity=-1); + insertExamples(g, gng.preset.sphere(300)) + Sys.sleep(1) + pause(g) + + gngSave(g, file='mygraph.bin') + + g2 <- gngLoad("mygraph.bin") + # Check basic deserialization + expect_that(g2$.getConfiguration()$alpha == g$.getConfiguration()$alpha && + g2$.getConfiguration()$eps_n == g$.getConfiguration()$eps_n, is_true()) + + # Check basic equivalency (TODO: check something deeper) + for(i in 1:100){ + point <- runif(3) + expect_that(predict(g, point) == predict(g2, point), is_true()) + } + + file.remove("mygraph.bin") + + expect_that(isRunning(g), is_false()) +}) +# +test_that("predictCluster returns sensible results", { + print("predictCluster returns sensible results") + data(cec.mouse1.spherical) + g <- GNG(cec.mouse1.spherical, max.nodes=50) + mouse_centr <- calculateCentroids(g) + + m = as.data.frame(cec.mouse1.spherical) + colnames(m) = c("x", "y") + + x_col <- cec.mouse1.spherical[,1] + y_col <- cec.mouse1.spherical[,2] + + x_max <- max(x_col) + x_min <- min(x_col) + y_max <- max(y_col) + y_min <- min(y_col) + + x_axis <- seq(from=x_min, to=x_max, length.out=30) + y_axis <- seq(from=y_min, to=y_max, length.out=30) + grid <- data.frame(x_axis,y_axis) + grid <- expand.grid(x=x_axis,y=y_axis) + target <- findClosests(g, node.ids=mouse_centr, x=grid) + target_loopy <- apply(grid, 1, function(x) findClosests(g, node.ids=mouse_centr, x=x)) + + print(target) + print(target_loopy) + + grid["target"] <- target + library(ggplot2) + + + # Equivalent + expect_that(all(target==target_loopy), is_true()) + + # More or less balanced (if fails - might be needed to change threshold) + expect_that(all(sapply(table(target), function(x) x>40)), is_true()) + + # At least catches most important clusters + expect_that(length(calculateCentroids(g)) > 3, is_true()) +}) + +test_that("GNG converges on simple cases", { + print("GNG converges on simple cases") + online_converged <- function(gng){ + n <- 0 + print("Waiting to converge") + while(numberNodes(gng) != gng$.getConfiguration()$max_nodes && n < 100) { + Sys.sleep(1.0) + n <- n + 1 + } + test_that("GNG has reached expected number of vertexes", { + expect_that(n < 100, is_true() ) + }) + } + + sanity_check <- function(gng){ + # Find closest node + predict(gng, c(1,1,1)) + + # # Get igraph + ig <- convertToIGraph(gng) + + # # Running testthat unit tests (almost) + test_that("GNG has not isolated vertexes", { + expect_that(any(degree(ig)==0), is_false()) + }) + + test_that("GNG has converged", { + error_before = meanError(gng) + expect_that(error_before < 50.0/max_nodes, is_true() ) + }) + + # Test memory + terminate(gng) + print("Saving in sanity check:") + gngSave(gng, "graph.bin") + } + + + max_nodes <- 600 + ex <- gng.preset.sphere(N=90000) + gng <- GNG(max.nodes=max_nodes, train.online=TRUE, dim=3, verbosity=3) + insertExamples(gng, ex) + run(gng) + online_converged(gng) + sanity_check(gng) + + gng <- GNG(ex, max.nodes=max_nodes, train.online=TRUE, 
dim=3, verbosity=3) + sanity_check(gng) + + + gng <- OptimizedGNG(max.nodes=max_nodes, train.online=TRUE, dim=3, verbosity=3, value.range=c(-2,2)) + insertExamples(gng, ex) + run(gng) + online_converged(gng) + sanity_check(gng) + + expect_that(isRunning(gng), is_false()) +}) + +test_that("GNG is working on mouse dataset", { + print("GNG is working on mouse dataset") + data(cec.mouse1.spherical) + dataset = cec.mouse1.spherical + gng <- GNG(dataset, seed=778) + expect_that(gng$getMeanError() < 0.1, is_true()) + expect_that(all(gng$getClustering() == predict(gng,dataset)), is_true()) + gng.refit <- GNG(dataset, seed=778) + # Seeding works => error statistics are the same + expect_that(all(abs(errorStatistics(gng.refit) - errorStatistics(gng)) < 1e-2), is_true() ) +}) + +test_that("GNG clustering and predict are returning the same", { + print("GNG clustering and predict are returning the same") + X <- replicate(10, rnorm(20)) + gng <- GNG(X) + expect_that(all(gng$getClustering() == predict(gng,X)), is_true()) +}) + +test_that("GNG errorStatistics and node retrieval work", { + print("GNG errorStatistics and node retrieval work") + X <- replicate(10, rnorm(20)) + gng <- GNG(X) + expect_that(length(errorStatistics(gng)) > 1, is_true()) + node(gng, 1) + pause(gng) + expect_that(isRunning(gng), is_false()) +}) + +test_that("GNG synchronization looks ok", { + print("GNG synchronization looks ok") + data(cec.mouse1.spherical) + dataset = cec.mouse1.spherical + synchronization_test <- function(){ + gng <- GNG(dataset, verbosity=3, max.nodes=20) + gng$.updateClustering() + sum_1 = (sum( gng$getClustering() != predict(gng, dataset))) + + gng <- GNG(train.online=TRUE, dim=2, verbosity=3, max.nodes=20) + gng$.insertExamples(dataset) + Sys.sleep(1) + gng$.pause() + gng$.updateClustering() + + sum_2 = (sum( gng$getClustering() != predict(gng, dataset))) + + expect_that(sum_1 == 0 && sum_2 == 0, is_true()) + expect_that(isRunning(gng), is_false()) + } + for(i in 1:3){ + synchronization_test() + } + +}) diff --git a/tests/testthat/test_gng_utility.R b/tests/testthat/test_gng_utility.R new file mode 100644 index 00000000..e70d63d1 --- /dev/null +++ b/tests/testthat/test_gng_utility.R @@ -0,0 +1,68 @@ +library(testthat) + +test_that("GNG Utility works", { + print("GNG Utility works") + + max_nodes <- 500 + + # Construct gng object + gng <- GNG(max.nodes=max_nodes, train.online=TRUE, dim=3, verbosity=3, k=1.3) + + # Construct examples, here we will use a sphere + ex <- gng.preset.sphere(N=10000) + labels <- round(runif(10000)*3) + insertExamples(gng, ex, labels) + + # Run algorithm in parallel + run(gng) + meanError(gng) + + + # Wait for it to converge + print("Adding jumped distribution. 15s sleep") + Sys.sleep(15.0) + pause(gng) + plot(gng, mode="2d.errors") # 0.068 without utility, about 10 times less with it + + ex2 <- gng.preset.cube(N=10000, r=1.0, center=c(3.0,3.0,3.0)) + insertExamples(gng, ex2, labels) + + + run(gng) + print("Test::Jumped distribution added. 
15s sleep") + Sys.sleep(15.0) + pause(gng) + plot(gng, mode="2d.errors") + + g <- convertToIGraph(gng) + length(V(g)) + + if("rgl" %in% rownames(installed.packages()) == TRUE) { + plot(gng, mode=gng.plot.3d) + } + + + + print("Test::Graph after jumped distribution") + + ig <- convertToIGraph(gng) + + # Running unit tests (almost) + test_that("GNG has not isolated vertexes", { + expect_that(any(degree(ig)==0), is_false()) + }) + + print("Test::No isolated vertexes") + + + test_that("GNG has converged", { + error_before = meanError(gng) + expect_that(error_before < 50/max_nodes, is_true() ) + }) + + print("Test::Convergence test") + + terminate(gng) + + expect_that(isRunning(gng) == FALSE, is_true()) +}) diff --git a/tests/testthat/test_svm_2e.R b/tests/testthat/test_svm_2e.R new file mode 100644 index 00000000..87dbc71e --- /dev/null +++ b/tests/testthat/test_svm_2e.R @@ -0,0 +1,64 @@ +library(testthat) +library(gmum.r) + +data(svm_two_ellipsoids_dataset) +data(svm_breast_cancer_dataset) + +libraries <- c("libsvm", "svmlight") + +test_that("2e svm works with 2e dataset", { + ds <- svm.twoellipsoids.dataset + ds[,'V3'] <- as.factor(ds[,'V3']) + formula <- V3 ~ . + svm <- SVM(formula, ds, core="libsvm", kernel="linear", prep = "2e", C=10); + + x <- ds[,-3] + target <- ds[,3] + + pred <- predict(svm, x) + acc <- svm.accuracy(prediction=pred, target=target) + expect_that(acc==1, is_true()) +}) +print("test::2eSVM works with 2e dataset") + +test_that("2e svm works with breast cancer dataset", { + + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + formula <- X1 ~ . + svm <- SVM(formula, ds, core="libsvm", kernel="linear", prep = "2e", C=10); + + x <- ds[,-1] + target <- ds[,1] + pred <- predict(svm, x) + acc <- svm.accuracy(prediction=pred, target=target) + + expect_that(acc>0.95, is_true()) +}) +print("test::2eSVM works with breast cancer dataset") + +test_that("2e svm works better than normal SVM with breast cancer dataset", { + for (lib_i in libraries) { + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + formula <- X1 ~ . + + svm <- SVM( + formula, ds, core=lib_i, kernel="linear", prep = "none", C=1.5); + + x <- ds[,-1] + target <- ds[,1] + + pred <- predict(svm, x) + acc <- svm.accuracy(prediction=pred, target=target) + + twoe_svm <- SVM( + formula, ds, core=lib_i, kernel="linear", prep = "2e", C=1.5); + twoe_pred <- predict(twoe_svm, x) + twoe_acc <- svm.accuracy(prediction=twoe_pred, target=target) + expect_true(twoe_acc>acc) + } +}) +print("test::2eSVM works slightly better with breast cancer dataset") + + diff --git a/tests/testthat/test_svm_acc.R b/tests/testthat/test_svm_acc.R new file mode 100644 index 00000000..91cbd057 --- /dev/null +++ b/tests/testthat/test_svm_acc.R @@ -0,0 +1,45 @@ +library(testthat) +library(gmum.r) + +data(svm_breast_cancer_dataset) + +test_that('accuracy is calculated', { + + data(svm_breast_cancer_dataset) + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + x <- ds[,-1] + y <- ds[,1] + formula <- X1 ~ . + + svm <- SVM(formula, ds, core="libsvm", kernel="linear", C=10); + pred <- predict(svm, x) + acc <- svm.accuracy(prediction=pred, target=y) + + expect_that(acc, is_a("numeric")) + expect_that(acc <= 1, is_true()) + expect_that(acc > 0.95, is_true()) +}) +print("test::svm accuracy") + +test_that('accuracy checks input size', { + + ds <- as.data.frame(ds <- matrix( + c(0,1,0,1,0,0,1,1,0,1,1,0), + ncol=3, + nrow=4, + dimnames=list(c(),c("x","y","t")))) + ds[,3] <- as.factor(ds[,3]) + formula <- t ~ . 
+ svm <- SVM(formula, ds, core="libsvm", kernel="linear", C=100); + + x <- ds[,-3] + pred <- predict(svm, x) + target <- c(1, 1) + + expect_error(svm.accuracy(target, pred), "Prediction's and target's length don't match!") +}) +print("test::accuracy checks input size") + + + diff --git a/tests/testthat/test_svm_basics.R b/tests/testthat/test_svm_basics.R new file mode 100644 index 00000000..8d049e42 --- /dev/null +++ b/tests/testthat/test_svm_basics.R @@ -0,0 +1,89 @@ +library(testthat) +library(gmum.r) + +data(svm_two_ellipsoids_dataset) +data(svm_breast_cancer_dataset) + +test_that('SVM functions are foolproof', { + + ds <- svm.twoellipsoids.dataset + ds[,'V3'] <- as.factor(ds[,'V3']) + f <- V3 ~ . + + expect_error( SVM(f, ds, core="xyz")) + expect_error( SVM(f, ds, kernel="xyz")) + expect_error( SVM(f, ds, prep="xyz")) + expect_error( SVM(f, ds, C = -1)) + expect_error( SVM(f, ds, gamma = -1)) + expect_error( SVM(f, ds, degree = 0)) + +}) +print("test::SVM throws user errors") + +test_that("SVM both constructor works", { + + data(svm_breast_cancer_dataset) + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + x <- subset(ds, select = -X1) + y <- ds[,'X1'] + f <- X1 ~ . + + svm1 <- SVM(x,y) + svm2 <- SVM(f, ds) + + pred1 <- predict(svm1, x) + pred2 <- predict(svm2, x) + + expect_that(all.equal(pred1, pred2), is_true()) +}) +print("test::SVM both constructors work") + + +test_that("svm accepts and deals properly with factors", { + + library(mlbench) + data(Sonar) + library(caret) + set.seed(998) + + inTraining <- createDataPartition(Sonar$Class, p = .75, list = FALSE) + training <- Sonar[ inTraining,] + svm <- SVM(Class ~ ., training) + pred <- predict(svm, training[, 1:60]) + + expect_is(pred, "factor") + +}) + +test_that("svm max.iter works", { + + data(svm_breast_cancer_dataset) + ds <- svm.breastcancer.dataset + x <- subset(ds, select = -X1) + y <- factor(unlist(ds['X1'])) + + + start_time <- Sys.time() + svm1 <- SVM(x,y, core="libsvm", max.iter = 1) + restricted_time <- Sys.time() - start_time + + start_time <- Sys.time() + svm2 <- SVM(x,y, core="libsvm") + normal_time <- Sys.time() - start_time + + expect_true(restricted_time < normal_time) + + start_time <- Sys.time() + svm1 <- SVM(x,y, core="svmlight", max.iter = 1) + restricted_time <- Sys.time() - start_time + + start_time <- Sys.time() + svm2 <- SVM(x,y, core="svmlight") + normal_time <- Sys.time() - start_time + + expect_true(restricted_time < normal_time) +}) + + + diff --git a/tests/testthat/test_svm_benchmark.R b/tests/testthat/test_svm_benchmark.R new file mode 100644 index 00000000..e3f7ee33 --- /dev/null +++ b/tests/testthat/test_svm_benchmark.R @@ -0,0 +1,130 @@ +library(e1071) +library(klaR) +library(gmum.r) +library(testthat) + +# You need svmlight executables for this test to work! +test_that("our accuracy is not much different from other implementations", { + eps <- 0.05 + data(svm_breast_cancer_dataset) + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + x <- subset(ds, select = -X1) + y <- factor(ds[,'X1']) + formula <- X1 ~ . 
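+ # note: each kernel below is trained with e1071, gmum.r/libsvm and + # gmum.r/svmlight on the same data; accuracies are expected to agree within eps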
+ klar <- FALSE # set TRUE for svmlight testing if you have svmlight binaries + glib_svm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="poly", C=1, verbosity=0, gamma=1) + if (!klar){ + warning("svmlight portion is disabled, enable it if you have svmlight binaries") + } + + # Linear kernel + e_svm <- e1071::svm(formula, data=ds, type='C-classification', kernel="linear", cost=1, gamma=1, scale=FALSE) + glib_svm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="linear", C=1, verbosity=0) + glight_svm <- gmum.r::SVM(formula, ds, core="svmlight", kernel="linear", C=1) + if(klar) klar_svmlight <- klaR::svmlight(formula, data=ds, type="C", svm.options="-v 0") + glib_esvm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="linear", prep="2e", C=1, verbosity=0) + e_pred <- as.numeric(as.matrix(fitted(e_svm))) + glib_pred <- predict(glib_svm, x) + glight_pred <- predict(glight_svm, x) + # NOTE: No idea how to mute this... (tried: invisible, capture.output, sink) + if(klar) klar_svmlight_pred <- as.numeric(as.matrix(predict(klar_svmlight, x)$class)) + glib_epred <- predict(glib_esvm, x) + + print("0. linear kernel:") + + y.factor <- as.factor(y) + e_acc <- svm.accuracy(e_pred, y) + print(sprintf("e1071 acc: %f", e_acc)) + glib_acc <- svm.accuracy(glib_pred, y.factor) + print(sprintf("gmum libsvm acc: %f", glib_acc)) + if(klar) klar_acc <- svm.accuracy(klar_svmlight_pred, y) + if(klar) print(sprintf("klaR svmlight acc: %f", klar_acc)) + glight_acc <- svm.accuracy(glight_pred, y.factor) + print(sprintf("gmum svmlight acc: %f", glight_acc)) + + acc <- svm.accuracy(glib_epred, y.factor) + print(sprintf("gmum libsvm 2e acc: %f", acc)) + + expect_that( abs(e_acc-glib_acc) < eps, is_true()) + if(klar) expect_that( abs(klar_acc-glight_acc) < eps, is_true()) + + # Poly kernel + e_svm <- e1071::svm(formula, data=ds, type='C-classification', kernel="poly", cost=1, gamma=1, scale=FALSE) + glib_svm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="poly", C=1, verbosity=0, gamma=1) + glight_svm <- gmum.r::SVM(formula, ds, core="svmlight", kernel="poly", C=1, gamma=1, coef0=1) + if(klar) klar_svmlight <- klaR::svmlight(formula, data=ds, type="C", svm.options="-v 0 -t 1 -s 1 -r 1") + + e_pred <- as.numeric(as.matrix(fitted(e_svm))) + glib_pred <- predict(glib_svm, x) + glight_pred <- predict(glight_svm, x) + if(klar) klar_svmlight_pred <- as.numeric(as.matrix(predict(klar_svmlight, x)$class)) + + print ("1. 
poly kernel:") + + y.factor <- as.factor(y) + e_acc <- svm.accuracy(e_pred, y) + print(sprintf("e1071 acc: %f", e_acc)) + glib_acc <- svm.accuracy(glib_pred, y.factor) + print(sprintf("gmum libsvm acc: %f", glib_acc)) + if(klar) klar_acc <- svm.accuracy(klar_svmlight_pred, y) + if(klar) print(sprintf("klaR svmlight acc: %f", klar_acc)) + glight_acc <- svm.accuracy(glight_pred, y.factor) + print(sprintf("gmum svmlight acc: %f", glight_acc)) + + expect_that( e_acc-glib_acc < eps, is_true()) + if(klar) expect_that( abs(klar_acc-glight_acc) < eps, is_true()) + + # RBF kernel + e_svm <- e1071::svm(formula, data=ds, type='C-classification', kernel="radial", cost=1, gamma=1, scale=FALSE) + glib_svm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="rbf", C=1, verbosity=0, gamma=1) + glight_svm <- gmum.r::SVM(formula, ds, core="svmlight", kernel="rbf", C=1, gamma=1) + if(klar) klar_svmlight <- klaR::svmlight(formula, data=ds, type="C", svm.options="-v 0 -t 2 -s 1 -r 1") + + e_pred <- as.numeric(as.matrix(fitted(e_svm))) + glib_pred <- predict(glib_svm, x) + glight_pred <- predict(glight_svm, x) + if(klar) klar_svmlight_pred <- as.numeric(as.matrix(predict(klar_svmlight, x)$class)) + + + print ("2. rbf kernel:") + y.factor <- as.factor(y) + e_acc <- svm.accuracy(e_pred, y) + print(sprintf("e1071 acc: %f", e_acc)) + glib_acc <- svm.accuracy(glib_pred, y.factor) + print(sprintf("gmum libsvm acc: %f", glib_acc)) + if(klar) klar_acc <- svm.accuracy(klar_svmlight_pred, y) + if(klar) print(sprintf("klaR svmlight acc: %f", klar_acc)) + glight_acc <- svm.accuracy(glight_pred,y.factor) + print(sprintf("gmum svmlight acc: %f", glight_acc)) + expect_that( abs(e_acc-glib_acc) < eps, is_true()) + if(klar) expect_that( abs(klar_acc-glight_acc) < eps, is_true()) + + # Sigmoid kernel + e_svm <- e1071::svm(formula, data=ds, type='C-classification', kernel="sigmoid", cost=1, gamma=1, scale=FALSE) + glib_svm <- gmum.r::SVM(formula, ds, core="libsvm", kernel="sigmoid", C=1, verbosity=0) + glight_svm <- gmum.r::SVM(formula, ds, core="svmlight", kernel="sigmoid", C=1) + # -r and -s are sigmoid kernel parameters. 1/dimensions = 1/11 = 0.09090909 + if(klar) klar_svmlight <- klaR::svmlight(formula, data=ds, type="C", svm.options="-v 0 -t 3 -r -1 -s 0.09090909") + + e_pred <- as.numeric(as.matrix(fitted(e_svm))) + glib_pred <- predict(glib_svm, x) + glight_pred <- predict(glight_svm, x) + if(klar) klar_svmlight_pred <- as.numeric(as.matrix(predict(klar_svmlight, x)$class)) + + print ("3. 
sigmoid kernel:") + y.factor <- as.factor(y) + e_acc <- svm.accuracy(e_pred, y) + print(sprintf("e1071 acc: %f", e_acc)) + glib_acc <- svm.accuracy(glib_pred, y.factor) + print(sprintf("gmum libsvm acc: %f", glib_acc)) + if(klar) klar_acc <- svm.accuracy(klar_svmlight_pred, y) + if(klar) print(sprintf("klaR svmlight acc: %f", klar_acc)) + glight_acc <- svm.accuracy(glight_pred, y.factor) + print(sprintf("gmum svmlight acc: %f", glight_acc)) + + expect_that( abs(e_acc-glib_acc) < eps || glib_acc > e_acc, is_true()) + if(klar) expect_that( abs(klar_acc-glight_acc) < eps, is_true()) + +}) + diff --git a/tests/testthat/test_svm_bigdata.R b/tests/testthat/test_svm_bigdata.R new file mode 100644 index 00000000..5153e62e --- /dev/null +++ b/tests/testthat/test_svm_bigdata.R @@ -0,0 +1,32 @@ +library(gmum.r) +library(e1071) +library(testthat) + + + + +if(!file.exists(svm.colon_cancer.path)) { + print(svm.colon_cancer.path) + warning("Please run download_svm_data.R from main repo directory in order to download big data sets devtools:install") +} else { + test_that("2e works with really big dataset", { + + ds <- as.data.frame(svm.dataset.colon_cancer()) + formula <- V1 ~ . + + svm <- SVM(formula, ds, core="libsvm", kernel="linear", prep = "none", C=10); + twoe_svm <- SVM(formula, ds, core="libsvm", kernel="linear", prep = "2e", C=10); + + # TODO: check is those a re correct columns + x <- ds[,-1] + target <- ds[,1] + + pred <- predict(svm, x) + twoe_pred <- predict(twoe_svm, x) + acc <- svm.accuracy(prediction=pred, target=target) + twoe_acc <- svm.accuracy(prediction=twoe_pred, target=target) + + print(sprintf("SVM acc: %f", acc)) + print(sprintf("2eSVM acc: %f", twoe_acc)) + }) +} diff --git a/tests/testthat/test_svm_caret.R b/tests/testthat/test_svm_caret.R new file mode 100644 index 00000000..0a862058 --- /dev/null +++ b/tests/testthat/test_svm_caret.R @@ -0,0 +1,44 @@ +library(testthat) +library(gmum.r) + +test_that("Basic caret support for SVM works ", { + library(caret) + library(mlbench) + + set.seed(777) + + data(Sonar) + + print("Fitting SVM to Sonar dataset") + + inTraining <- createDataPartition(Sonar$Class, p = .75, list = FALSE) + training <- Sonar[ inTraining,] + testing <- Sonar[-inTraining,] + + + + fitControl <- trainControl(method = "cv", + ## 10-fold CV... 
+ number = 2, + ## repeats is only relevant for repeated CV + repeats = 1, + verboseIter=FALSE + ) + + + + model <- train(Class ~ ., data = training, + method = caret.gmumSvmRadial, + preProc = c("center", "scale"), + tuneLength = 8, + trControl = fitControl, + tuneGrid = expand.grid(C=c(1,10,100), gamma=c(0.001, 0.01, 0.1)), + verbosity = 0) + + print(model) + summary(model) + + expect_that(max(model$results$Accuracy) > 0.7, is_true()) +}) +print("test::svm caret") + diff --git a/tests/testthat/test_svm_class_and_example_weights.R b/tests/testthat/test_svm_class_and_example_weights.R new file mode 100644 index 00000000..6ac87b1e --- /dev/null +++ b/tests/testthat/test_svm_class_and_example_weights.R @@ -0,0 +1,38 @@ +library(gmum.r) +library(testthat) + +core <- "svmlight" + +x <- cbind(c(2.76405234597, 1.97873798411, 2.86755799015, 1.95008841753, 0.896781148206, 1.14404357116, + 1.76103772515, 1.44386323275, 2.49407907316, 1.31306770165, -2.55298981583, 0.86443619886, + 2.26975462399, 0.0457585173014, 1.53277921436, 0.154947425697, -0.88778574763, -0.347912149326, + 1.23029068073, -0.387326817408), + c(1.40015720837, 3.2408931992, 0.0227221201236, 0.848642791702, 1.41059850194, 2.45427350696, + 1.12167501649, 1.33367432737, 0.794841736234, 0.145904260698, 0.65361859544, -0.742165020406, + -1.4543656746, -0.187183850026, 1.4693587699, 0.378162519602, -1.98079646822, 0.156348969104, + 1.20237984878, -0.302302750575)) + +y <- as.factor(c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1)) + +ex_weights <- c(1.04855297, 1.42001794, 1.70627019, 1.9507754, 0.50965218, 0.4380743, + 1.25279536, 0.77749036, 1.61389785, 3.1911042, 0.89546656, 0.3869025, + 0.51080514, 1.18063218, 0.02818223, 2.14165935, 0.33258611, 1.51235949, + 3.17161047, 1.81370583) + +sk_ex_SV <- cbind(c(0.86443619886, 2.26975462399, 1.53277921436, 0.154947425697, 1.23029068073, + 0.896781148206, 1.14404357116, 1.76103772515, 1.44386323275, 1.31306770165), + c(-0.742165020406, -1.4543656746, 1.4693587699, 0.378162519602, 1.20237984878, + 1.41059850194, 2.45427350696, 1.12167501649, 1.33367432737, 0.145904260698)) + +cl_weights <- c("1"=1, "-1"=10) +df <- data.frame(x, y) + +ex_weights_svm <- SVM(formula=y~., data=df, core="svmlight", kernel="linear", C=1.0, example.weights=ex_weights) +cl_weights_svm <- SVM(formula=y~., data=df, core="svmlight", kernel="linear", C=1.0, class.weights=cl_weights) +ex_cl_weights_svm <- SVM(formula=y~., data=df, core="svmlight", kernel="linear", C=1.0, class.weights=cl_weights, example.weights=ex_weights) + +ex_SV <- ex_weights_svm$SV + +print(length(ex_SV)) +print(length(sk_ex_SV)) + diff --git a/tests/testthat/test_svm_class_weights.R b/tests/testthat/test_svm_class_weights.R new file mode 100644 index 00000000..e8821f42 --- /dev/null +++ b/tests/testthat/test_svm_class_weights.R @@ -0,0 +1,65 @@ +library(testthat) +library(e1071) +library(caret) +library(gmum.r) + + +test_that('Simple test that class.weights works for libsvm and svmlight', { + library(SparseM) + + for(weight_pos in c(1,2,10)){ + for(core in c("svmlight", "libsvm")){ + liver <- read.matrix.csr(system.file("dev", "datasets", "svm", "liver-disorders", mustWork=TRUE, package="gmum.r")) + x <- liver$x + # Ugly change of level + y <- liver$y + y <- as.numeric(y) + y[y==1] <- 3 + y <- as.factor(y) + + i <- as.numeric(createDataPartition(y, times=1, list=FALSE)) + wsvm <- SVM(x[i, ], y[i], core=core, kernel="linear", + C=1, class.weights=list("3"=weight_pos,"2"=3.0)) + pred.1 <- svm.accuracy(predict(wsvm, x[-i, ]), y[-i]) + + wsvm <- 
diff --git a/tests/testthat/test_svm_class_weights.R b/tests/testthat/test_svm_class_weights.R
new file mode 100644
index 00000000..e8821f42
--- /dev/null
+++ b/tests/testthat/test_svm_class_weights.R
@@ -0,0 +1,65 @@
+library(testthat)
+library(e1071)
+library(caret)
+library(gmum.r)
+
+
+test_that('Simple test that class.weights works for libsvm and svmlight', {
+  library(SparseM)
+
+  for(weight_pos in c(1,2,10)){
+    for(core in c("svmlight", "libsvm")){
+      liver <- read.matrix.csr(system.file("dev", "datasets", "svm", "liver-disorders", mustWork=TRUE, package="gmum.r"))
+      x <- liver$x
+      # Ugly relabeling: move class 1 out of the way, so the classes are {2, 3}
+      y <- liver$y
+      y <- as.numeric(y)
+      y[y==1] <- 3
+      y <- as.factor(y)
+
+      i <- as.numeric(createDataPartition(y, times=1, list=FALSE))
+      wsvm <- SVM(x[i, ], y[i], core=core, kernel="linear",
+                  C=1, class.weights=list("3"=weight_pos,"2"=3.0))
+      pred.1 <- svm.accuracy(predict(wsvm, x[-i, ]), y[-i])
+
+      wsvm <- SVM(x[i, ], y[i], core=core, kernel="linear",
+                  C=1, class.weights=list("2"=3.0,"3"=weight_pos))
+      pred.2 <- svm.accuracy(predict(wsvm, x[-i, ]), y[-i])
+      # class.weights entries are matched by name, so their order must not matter
+      expect_that(pred.1 == pred.2, is_true())
+
+      # Compare to e1071 (libsvm)
+      esvm <- e1071::svm(x=x[i,], y=y[i], type='C-classification', scale=FALSE, kernel='linear', cost=1,
+                         class.weights = c("3"=weight_pos, "2"=3.))
+      pred.esvm <- svm.accuracy(predict(esvm, x[-i, ]), y[-i])
+      # e1071's accuracy should approximately match gmum.r's libsvm/svmlight;
+      # this should hold for small datasets
+      expect_that(pred.1 == pred.2 && abs(pred.1 - pred.esvm) < 1e-2, is_true())
+    }
+  }
+})
+print("test::svm class weights")
+
+test_that("auto class weighting works for svm", {
+  library(caret)
+  library(e1071)
+  liver <- read.matrix.csr(system.file("dev", "datasets", "svm", "liver-disorders", mustWork=TRUE, package="gmum.r"))
+  x <- liver$x
+  y <- as.factor(liver$y)
+  set.seed(777)
+  i <- as.numeric(createDataPartition(y, times=1, list=FALSE))
+  svm <- SVM(x[i, ], y[i], C=1, class.weights = 'auto')
+  w <- svm$classWeights
+  p <- predict(svm, x[-i, ])
+  acc <- svm.accuracy(p, y[-i])
+
+  e_svm <- e1071::svm(x[i, ], y[i], type='C-classification', kernel="linear",
+                      class.weights = list("1"=w[1], "2"=w[2]), cost=1, scale=FALSE)
+  e_p <- predict(e_svm, x[-i, ])
+  e_acc <- svm.accuracy(e_p, y[-i])
+
+  diff <- w - c(1.15942, 0.84057)
+
+  expect_true(mean(abs(diff)) < 1e-2)
+})
+print("test::svm auto class weights")
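The expected weights `c(1.15942, 0.84057)` in the auto-weighting test are consistent with the common "balanced" heuristic, in which each class is weighted inversely to its frequency. The sketch below spells that heuristic out; that `class.weights='auto'` uses exactly this formula is an assumption inferred from the test's constants, not a documented guarantee.

# Hedged sketch: the 'balanced' heuristic w_j = n / (K * n_j), where n is the
# total sample count, K the number of classes and n_j the size of class j.
auto_class_weights <- function(y) {
  tab <- table(y)                       # class counts n_j
  w <- length(y) / (length(tab) * tab)  # n / (K * n_j)
  setNames(as.numeric(w), names(tab))
}

# Toy check: a 60/40 split yields c("a"=0.8333..., "b"=1.25).
auto_class_weights(factor(rep(c("a", "b"), times = c(60, 40))))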
diff --git a/tests/testthat/test_svm_datasets.R b/tests/testthat/test_svm_datasets.R
new file mode 100644
index 00000000..da6ff8b6
--- /dev/null
+++ b/tests/testthat/test_svm_datasets.R
@@ -0,0 +1,75 @@
+library(testthat)
+library(gmum.r)
+
+data(svm_breast_cancer_dataset)
+data(svm_two_ellipsoids_dataset)
+data(svm_two_circles_dataset)
+
+test_that('breast_cancer dataset works', {
+
+  ds <- svm.breastcancer.dataset
+  ds[,'X1'] <- as.factor(ds[,'X1'])
+  formula <- X1 ~ .
+  svm <- SVM(formula, ds, core="libsvm", kernel="linear", C=1)
+
+  x <- ds[,-1]
+  y <- ds[,1]
+
+  pred <- predict(svm, x)
+  diff <- length(pred) - length(y)
+
+  expect_that(diff == 0, is_true())
+})
+print("test::breast cancer dataset")
+
+test_that('2e dataset works', {
+
+  ds <- svm.twoellipsoids.dataset
+  ds[,3] <- as.factor(ds[,3])
+  formula <- V3 ~ .
+  svm <- SVM(formula, ds, core="libsvm", kernel="linear", C=1)
+
+  x <- ds[,-3]
+  target <- ds[,3]
+
+  pred <- predict(svm, x)
+  diff <- length(pred) - length(target)
+
+  expect_that(diff == 0, is_true())
+})
+print("test::2e dataset")
+
+test_that('2 circles dataset works', {
+
+  ds <- svm.twocircles.dataset
+  ds[,3] <- as.factor(ds[,3])
+  formula <- V3 ~ .
+  svm <- SVM(formula, ds, core="libsvm", kernel="linear", C=1)
+
+  x <- ds[,-3]
+  target <- ds[,3]
+
+  pred <- predict(svm, x)
+  diff <- length(pred) - length(target)
+
+  expect_that(diff==0, is_true())
+})
+print("test::2 circles dataset")
+
+test_that('xor dataset works', {
+
+  ds <- as.data.frame(matrix(
+    c(0,1,0,1,0,0,1,1,0,1,1,0),
+    ncol=3,
+    nrow=4,
+    dimnames=list(c(),c("x","y","t"))))
+  ds[,3] <- as.factor(ds[,3])
+  formula <- t ~ .
+  svm <- SVM(formula, ds, core="svmlight", kernel="linear", C=1)
+  x <- ds[,-3]
+  target <- ds[,3]
+  pred <- predict(svm, x)
+  diff <- length(pred) - length(target)
+  expect_that(diff == 0, is_true())
+})
+print("test::xor dataset")
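All four dataset tests assert only that `predict` returns one label per row, not that the model separates anything. If a stronger check is wanted, a sketch for the breast_cancer case could read as follows; it would sit inside that test, where `svm`, `x` and `y` are in scope. The 0.9 threshold is an assumption, informed by the ~0.97 accuracy that test_svm_options.R reports on the same data with C=100.

# Hedged sketch: assert a minimum training accuracy in addition to the
# length check. The 0.9 floor is an assumption, not a package guarantee.
pred <- predict(svm, x)
acc <- svm.accuracy(prediction=pred, target=y)
expect_that(length(pred) == length(y), is_true())
expect_that(acc > 0.9, is_true())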
diff --git a/tests/testthat/test_svm_example_weights.R b/tests/testthat/test_svm_example_weights.R
new file mode 100644
index 00000000..30c3af2b
--- /dev/null
+++ b/tests/testthat/test_svm_example_weights.R
@@ -0,0 +1,162 @@
+library(gmum.r)
+library(testthat)
+library(ggplot2)
+
+core <- "svmlight"
+verbose <- FALSE # use TRUE only for local testing
+
+if (verbose) {
+  print(sprintf("Library used for non-weights svm: %s", core))
+}
+
+x <- cbind(c(2.76405235, 1.97873798, 2.86755799, 1.95008842, 0.89678115, 1.14404357,
+             1.76103773, 1.44386323, 2.49407907, 1.3130677, -2.55298982, 0.8644362,
+             2.26975462, 0.04575852, 1.53277921, 0.15494743, -0.88778575, -0.34791215,
+             1.23029068, -0.38732682),
+           c(1.40015721, 3.2408932, 0.02272212, 0.84864279, 1.4105985, 2.45427351,
+             1.12167502, 1.33367433, 0.79484174, 0.14590426, 0.6536186, -0.74216502,
+             -1.45436567, -0.18718385, 1.46935877, 0.37816252, -1.98079647, 0.15634897,
+             1.20237985, -0.30230275))
+
+y <- c(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1)
+
+weights <- c(1.04855297, 1.42001794, 1.70627019, 1.9507754, 0.50965218, 0.4380743,
+             1.25279536, 0.77749036, 1.61389785, 3.1911042, 0.89546656, 0.3869025,
+             0.51080514, 1.18063218, 0.02818223, 2.14165935, 0.33258611, 1.51235949,
+             3.17161047, 1.81370583)
+
+sk_no_weights_SV_rbf <- cbind(c(-2.55298982, 0.8644362, 2.26975462, 1.53277921, 0.15494743, -0.88778575,
+                                1.23029068, 2.76405235, 1.97873798, 2.86755799, 0.89678115, 1.14404357,
+                                1.76103773, 1.44386323, 1.3130677),
+                              c(0.6536186, -0.74216502, -1.45436567, 1.46935877, 0.37816252, -1.98079647,
+                                1.20237985, 1.40015721, 3.2408932, 0.02272212, 1.4105985, 2.45427351,
+                                1.12167502, 1.33367433, 0.14590426))
+
+sk_no_weights_SV_linear <- cbind(c(2.26975462, 1.53277921, 0.15494743, 1.23029068, 2.86755799, 1.95008842,
+                                   0.89678115, 1.3130677),
+                                 c(-1.45436567, 1.46935877, 0.37816252, 1.20237985, 0.02272212, 0.84864279,
+                                   1.4105985, 0.14590426))
+
+sk_weights_SV_rbf <- cbind(c(-2.55298982, 0.8644362, 2.26975462, 0.04575852, 1.53277921, 0.15494743,
+                             -0.88778575, 1.23029068, 2.76405235, 1.97873798, 2.86755799, 0.89678115,
+                             1.14404357, 1.76103773, 1.44386323, 1.3130677),
+                           c(0.6536186, -0.74216502, -1.45436567, -0.18718385, 1.46935877, 0.37816252,
+                             -1.98079647, 1.20237985, 1.40015721, 3.2408932, 0.02272212, 1.4105985,
+                             2.45427351, 1.12167502, 1.33367433, 0.14590426))
+
+sk_weights_SV_linear <- cbind(c(0.8644362, 2.26975462, 1.53277921, 0.15494743, 1.23029068, 0.89678115,
+                                1.14404357, 1.76103773, 1.44386323, 1.3130677),
+                              c(-0.74216502, -1.45436567, 1.46935877, 0.37816252, 1.20237985, 1.4105985,
+                                2.45427351, 1.12167502, 1.33367433, 0.14590426))
+
+df <- data.frame(x, y)
+
+# -------------------------------------------------------
+
+no_weights_svm_rbf <- SVM(formula=y~., data=df, core=core, kernel="rbf", C=1.0, gamma=0.5)
+SV_rbf <- no_weights_svm_rbf$SV
+
+if (verbose) {
+
+  print(sprintf(" sklearn rbf nSV: %i", nrow(sk_no_weights_SV_rbf)))
+  print(sprintf(" gmum.r rbf nSV %i", nrow(SV_rbf)))
+
+  p1 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(SV_rbf), aes(X1, X2), color="black") +
+    ggtitle("gmum.r rbf") +
+    theme(legend.position="none")
+
+  p2 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(sk_no_weights_SV_rbf), aes(X1, X2), color="black") +
+    ggtitle("sklearn rbf") +
+    theme(legend.position="none")
+
+  multiplot(p1, p2, cols=2)
+}
+# -------------------------------------------------------
+
+no_weights_svm_linear <- SVM(formula=y~., data=df, core=core, kernel="linear", C=1.0)
+SV_linear <- no_weights_svm_linear$SV
+
+if (verbose) {
+
+  print(sprintf(" sklearn linear nSV: %i", nrow(sk_no_weights_SV_linear)))
+  print(sprintf(" gmum.r linear nSV %i", nrow(SV_linear)))
+
+  p3 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(SV_linear), aes(X1, X2), color="black") +
+    ggtitle("gmum.r linear") +
+    theme(legend.position="none")
+
+  p4 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(sk_no_weights_SV_linear), aes(X1, X2), color="black") +
+    ggtitle("sklearn linear") +
+    theme(legend.position="none")
+
+  multiplot(p3, p4, cols=2)
+}
+
+# -------------------------------------------------------
+if (verbose) {
+  print("For weighted examples only svmlight is available")
+}
+
+weights_svm_rbf <- SVM(formula=y~., data=df, core="svmlight", kernel="rbf", C=1.0, gamma=0.5, example.weights=weights)
+weights_SV_rbf <- weights_svm_rbf$SV
+
+if (verbose) {
+
+  print(sprintf(" sklearn weighted rbf nSV: %i", nrow(sk_weights_SV_rbf)))
+  print(sprintf(" gmum.r weighted rbf nSV %i", nrow(weights_SV_rbf)))
+
+  p5 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(weights_SV_rbf), aes(X1, X2), color="black") +
+    ggtitle("gmum.r weighted rbf") +
+    theme(legend.position="none")
+
+  p6 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(sk_weights_SV_rbf), aes(X1, X2), color="black") +
+    ggtitle("sklearn weighted rbf") +
+    theme(legend.position="none")
+
+  multiplot(p5, p6, cols=2)
+}
+# -------------------------------------------------------
+
+weights_svm_linear <- SVM(formula=y~., data=df, core="svmlight", kernel="linear", C=1.0, example.weights=weights)
+weights_SV_linear <- weights_svm_linear$SV
+
+if (verbose) {
+
+  print(sprintf(" sklearn weighted linear nSV: %i", nrow(sk_weights_SV_linear)))
+  print(sprintf(" gmum.r weighted linear nSV %i", nrow(weights_SV_linear)))
+
+  p7 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(weights_SV_linear), aes(X1, X2), color="black") +
+    ggtitle("gmum.r weighted linear") +
+    theme(legend.position="none")
+
+  p8 <- ggplot() +
+    geom_point(data=df, aes(X1,X2, color=factor(y), size=2)) +
+    geom_point(data=data.frame(sk_weights_SV_linear), aes(X1, X2), color="black") +
+    ggtitle("sklearn weighted linear") +
+    theme(legend.position="none")
+
+  multiplot(p7, p8, cols=2)
+}
+
+diff <- sum(abs(weights_SV_rbf[order(weights_SV_rbf[,1]),] - sk_weights_SV_rbf[order(sk_weights_SV_rbf[,1]),]))
+if (verbose) {
+  print(sprintf("Difference of (compatible) weighted rbf SVs: %.10f", diff))
+}
+
+test_that("weighted examples in svmlight work like in sklearn SVC", {
+  expect_that(diff < 1e-2, is_true())
+})
+print("test::SVM weighted examples in svmlight work like in sklearn SVC")
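A property that would pin the semantics of `example.weights` down further: an integer weight k on one example should behave roughly like duplicating that example k times, if the weights act as SVMLight-style per-example cost factors. That reading is an assumption, so the sketch below only reports disagreement rather than asserting it.

# Hedged sketch: weight 3 on the first point vs. physically tripling it.
# Assumes example.weights scale the per-example cost, as in SVMLight.
df.dup <- rbind(df, df[1, ], df[1, ])
svm.dup <- SVM(formula=y~., data=df.dup, core="svmlight", kernel="linear", C=1.0)
svm.w3  <- SVM(formula=y~., data=df, core="svmlight", kernel="linear", C=1.0,
               example.weights=c(3, rep(1, nrow(df) - 1)))
disagreements <- sum(as.character(predict(svm.dup, df[, 1:2])) !=
                     as.character(predict(svm.w3, df[, 1:2])))
print(sprintf("duplication vs. weight-3 disagreements: %d", disagreements))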
diff --git a/tests/testthat/test_svm_light.R b/tests/testthat/test_svm_light.R
new file mode 100644
index 00000000..51a4afeb
--- /dev/null
+++ b/tests/testthat/test_svm_light.R
@@ -0,0 +1,43 @@
+library(gmum.r)
+library(testthat)
+
+test_that("svmlight works", {
+  data(svm_two_ellipsoids_dataset)
+  ds <- svm.twoellipsoids.dataset
+  ds[,'V3'] <- as.factor(ds[,'V3'])
+  formula <- V3 ~ .
+  svm <- SVM(formula, ds, core="svmlight", kernel="linear", C=10, verbosity = 3);
+
+  x <- ds[,-3]
+  target <- ds[,3]
+
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=target)
+  print(acc)
+
+  expect_that(acc, is_a("numeric"))
+  expect_that(acc == 1, is_true())
+})
+print("test::svmlight works with simple 2e dataset")
+
+test_that('accuracy is calculated', {
+
+  data(svm_breast_cancer_dataset)
+  ds <- svm.breastcancer.dataset
+  ds[,'X1'] <- as.factor(ds[,'X1'])
+  formula <- X1 ~ .
+  svm <- SVM(formula, ds, core="svmlight", kernel="linear", C=100);
+
+  x <- ds[,-1]
+  target <- ds[,1]
+
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=target)
+  print(acc)
+
+  expect_that(acc, is_a("numeric"))
+  expect_that(acc <= 1, is_true())
+  expect_that(acc > 0.5, is_true())
+
+})
+print("test::accuracy")
diff --git a/tests/testthat/test_svm_ligth_indeterminism.R b/tests/testthat/test_svm_ligth_indeterminism.R
new file mode 100644
index 00000000..94682919
--- /dev/null
+++ b/tests/testthat/test_svm_ligth_indeterminism.R
@@ -0,0 +1,17 @@
+library(gmum.r)
+set.seed(777)
+test_that("svmlight is deterministic: always gives the same number of iterations and SVs on breastcancer", {
+  data(svm_breast_cancer_dataset)
+  ds <- svm.breastcancer.dataset
+  ds[,'X1'] <- as.factor(ds[,'X1'])
+  formula <- X1 ~ .
+  models <- lapply(1:10, function(x) SVM(formula, ds, core="svmlight", kernel="linear", C=100))
+  first_iteration = models[[1]]$iterations
+  first_n_SVs = models[[1]]$numberSV
+  for (model in models) {
+    print(model$iterations)
+    expect_that(first_iteration == model$iterations, is_true())
+    expect_that(first_n_SVs == model$numberSV, is_true())
+  }
+})
+print("Finally svmlight is deterministic (however, it is not thread-safe. #FIXME).")
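The determinism test compares only iteration and SV counts, which are necessary but not sufficient for determinism. A stricter variant could compare the fitted coefficients themselves; the sketch below would go inside the test above, where `models` is in scope, and assumes `$alpha` is comparable across runs (i.e. that SV ordering is itself deterministic).

# Hedged sketch: also require numerically identical alphas across the 10 refits.
expect_that(all(vapply(models, function(m)
  isTRUE(all.equal(m$alpha, models[[1]]$alpha)), logical(1))), is_true())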
#FIXME).") diff --git a/tests/testthat/test_svm_multi.R b/tests/testthat/test_svm_multi.R new file mode 100644 index 00000000..b59414cf --- /dev/null +++ b/tests/testthat/test_svm_multi.R @@ -0,0 +1,72 @@ +library(testthat) +library(gmum.r) +library(caret) + +test_that("Test basic svm multi", { + set.seed(777) + + centers <- list(c(0,0), + c(10,0), + c(0,10), + c(3,3)) + + sigmas <- list(matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 0, 1), nrow=2), + matrix(c(1, 0, 1, 1), nrow=2)) + + sizes <- list(100, 100, 100, 101) + + n <- length(centers) + my_df <<- c() + for(i in 1:n){ + my_df <<- rbind(my_df, cbind(mvrnorm(n=sizes[[i]], mu=centers[[i]], Sigma=sigmas[[i]]), rep(i,sizes[[i]]))) + } + my_df <<- data.frame(my_df) + colnames(my_df) <- c("x1","x2", "y") + my_df[,3] <- as.factor(my_df[,3]) + + sv <- SVM(x=my_df[,1:2], y=my_df[,3], class.type="one.versus.all") + preds <- predict(sv, my_df[,1:2]) + acc <- sum(diag(table(preds, my_df[,3])))/sum(table(preds, my_df[,3])) + #expect_that(acc > 0.96, is_true()) + plot(sv, X=my_df[,1:2]) + data(iris) + + sv.ova <- SVM(Species ~ ., data=iris, class.type="one.versus.all") + preds <- predict(sv.ova, iris[,1:4]) + acc.ova <- sum(diag(table(preds, iris$Species)))/sum(table(preds, iris$Species)) + plot(sv.ova) + + sv.ovo <- SVM(x=iris[,1:4], y=iris[,5], class.type="one.versus.one") + preds <- predict(sv.ovo, iris[,1:4]) + acc.ovo <- sum(diag(table(preds, iris$Species)))/sum(table(preds, iris$Species)) + plot(sv.ovo) + + e1.sv <- e1071::svm(Species ~., data=iris, kernel='linear') + preds <- predict(e1.sv, iris[,1:4]) + acc.e1 <- sum(diag(table(preds, iris$Species)))/sum(table(preds, iris$Species)) + + expect_that(acc.ova < acc.ovo && abs(acc.e1 - acc.ovo) < 1e-1, is_true()) + + + data(fgl) + X <- fgl[,1:9] + Y <- fgl[,10] + + sv.ova <- SVM(type ~ ., data=fgl, class.type="one.versus.all") + preds <- as.factor(predict(sv.ova, X)) + acc.ova <- sum(preds==as.factor(Y)) / length(preds) + + sv.ovo <- SVM(type ~ ., data=fgl, class.type="one.versus.one", C=1) + preds <- predict(sv.ovo, X) + acc.ovo <- sum(preds==as.factor(Y)) / length(preds) + + e1.sv <- e1071::svm(type ~., data=fgl, kernel='linear', C=1) + preds <- predict(e1.sv, X) + acc.e1 <- sum(diag(table(preds, Y)))/sum(table(preds, Y)) + + + expect_that(acc.ova < acc.ovo && abs(acc.e1 - acc.ovo) < 1e-1, is_true()) +}) + diff --git a/tests/testthat/test_svm_options.R b/tests/testthat/test_svm_options.R new file mode 100644 index 00000000..abfd6838 --- /dev/null +++ b/tests/testthat/test_svm_options.R @@ -0,0 +1,51 @@ +library(testthat) +library(gmum.r) + +data(svm_breast_cancer_dataset) + +test_that('command line svm.options are being handled for core="libsvm"', { + + data(svm_breast_cancer_dataset) + ds <- svm.breastcancer.dataset + ds[,'X1'] <- as.factor(ds[,'X1']) + x <- ds[,-1] + y <- ds[,1] + formula <- X1 ~ . 
diff --git a/tests/testthat/test_svm_options.R b/tests/testthat/test_svm_options.R
new file mode 100644
index 00000000..abfd6838
--- /dev/null
+++ b/tests/testthat/test_svm_options.R
@@ -0,0 +1,51 @@
+library(testthat)
+library(gmum.r)
+
+data(svm_breast_cancer_dataset)
+
+test_that('command line svm.options are being handled for core="libsvm"', {
+
+  data(svm_breast_cancer_dataset)
+  ds <- svm.breastcancer.dataset
+  ds[,'X1'] <- as.factor(ds[,'X1'])
+  x <- ds[,-1]
+  y <- ds[,1]
+  formula <- X1 ~ .
+
+  # Prediction has lower accuracy (0.6500732) when LibSVM learns with C=0.0001
+  svm <- SVM(formula, ds, core="libsvm", kernel="linear", svm.options="-c 0.0001");
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=y)
+  expect_that(acc < 0.95, is_true())
+
+  # Prediction has good accuracy (0.9721816) when LibSVM learns with C=100
+  svm <- SVM(formula, ds, core="libsvm", kernel="linear", svm.options="-c 100");
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=y)
+  expect_that(acc > 0.95, is_true())
+})
+
+test_that('command line svm.options are being handled for core="svmlight"', {
+
+  data(svm_breast_cancer_dataset)
+  ds <- svm.breastcancer.dataset
+  ds[,'X1'] <- as.factor(ds[,'X1'])
+  x <- ds[,-1]
+  y <- ds[,1]
+  formula <- X1 ~ .
+
+  # Prediction has lower accuracy (0.6500732) when SVMLight learns with C=0.0001
+  svm <- SVM(formula, ds, core="svmlight", kernel="linear", svm.options="-c 0.0001");
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=y)
+  expect_that(acc < 0.95, is_true())
+
+  # Prediction has good accuracy (0.9721816) when SVMLight learns with C=100
+  svm <- SVM(formula, ds, core="svmlight", kernel="linear", svm.options="-c 100");
+  pred <- predict(svm, x)
+  acc <- svm.accuracy(prediction=pred, target=y)
+  expect_that(acc > 0.95, is_true())
+})
+
+
+
diff --git a/tests/testthat/test_svm_sparse.R b/tests/testthat/test_svm_sparse.R
new file mode 100644
index 00000000..60bed1f0
--- /dev/null
+++ b/tests/testthat/test_svm_sparse.R
@@ -0,0 +1,34 @@
+library(testthat)
+library(gmum.r)
+library(e1071)  # read.matrix.csr()
+
+test_that("sparse matrices work", {
+  x <- read.matrix.csr(system.file("dev", "datasets", "svm", "dexter_train.data", mustWork=TRUE, package="gmum.r"))
+  y <- as.factor(unlist(read.table(system.file("dev", "datasets", "svm", "dexter_train.labels", mustWork=TRUE, package="gmum.r"))))
+
+  libs <- c("svmlight", "libsvm")
+  kernels <- c("linear", "poly", "rbf", "sigmoid")
+
+  for (lib_i in libs) {
+    for (kernel_i in kernels) {
+      print("")
+      print(sprintf("Kernel %s", kernel_i))
+      print(sprintf("Testing %s", lib_i))
+
+      print("Learning...")
+      svm <- SVM(x=x, y=y, core=lib_i, kernel=kernel_i, C=1, verbosity=0)
+
+      print("Prediction...")
+      pred_start_time <- Sys.time()
+      pred <- predict(svm, x)
+      pred_time <- Sys.time() - pred_start_time
+      print(sprintf("Prediction time %f", pred_time))
+
+      acc <- svm.accuracy(prediction=pred, target=y)
+      print(sprintf("Sparse acc, %s prediction: %f", lib_i, acc))
+    }
+  }
+
+})
+
+
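The sparse test prints eight accuracies but never asserts on them, so a regression in sparse handling would not fail the suite. One way to close that gap is sketched below, under the assumption that a C=1 model should fit the dexter training set well above chance; the 0.7 floor is a guess, and the sigmoid kernel may need a lower one.

# Hedged sketch: collect the accuracies computed in the loop above and
# assert on them at the end, so the test can actually fail.
accs <- c()
# ...inside the inner loop, after `acc` is computed:
#    accs <- c(accs, acc)
expect_that(all(accs > 0.7), is_true())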
diff --git a/tests/testthat/test_svm_transductive.R b/tests/testthat/test_svm_transductive.R
new file mode 100644
index 00000000..d5601176
--- /dev/null
+++ b/tests/testthat/test_svm_transductive.R
@@ -0,0 +1,121 @@
+# 1. Tests that we handle factors the same way e1071 does
+# 2. Tests that we get the same results on the induction task as svm_learn (hard-coded)
+# 3. Tests that transductive learning works on the dataset from the svmlight website
+library(testthat)
+library(e1071)
+library(caret)
+library(gmum.r)
+
+
+test_that("Transduction and factors work as in svmlight and libsvm, respectively", {
+  data(svm.transduction)
+
+  train.transduction <- svm.transduction$tr
+  train.induction <- svm.transduction$ind
+  test <- svm.transduction$test
+  test$x <- train.transduction$x[11:610, ]
+
+
+  # Check that we handle labels the same way!
+  library(e1071)
+  svm.induction.e <- svm(x=train.induction$x, y=train.induction$y, kernel='linear')
+  svm.induction.e.pred <- predict(svm.induction.e, test$x[,1:9253])
+
+  svm.induction <- SVM(x=train.induction$x, y=train.induction$y, core="svmlight", verbosity=1)
+  svm.induction.pred <- predict(svm.induction, test$x[,1:9253])
+
+
+  # Now reverse the labels
+  train.induction$y <- as.factor(as.numeric(train.induction$y))
+  train.transduction$y <- as.numeric(train.transduction$y)
+  train.transduction$y[train.transduction$y == 1] = 0
+  train.transduction$y <- as.factor(train.transduction$y)
+  test$y <- (as.numeric(test$y))
+  test$y[test$y == 1] = 3
+  test$y[test$y == 2] = 1
+  test$y[test$y == 3] = 2
+  test$y <- as.factor(test$y)
+
+  # Both e1071 and gmum.r fail on the incorrect factor
+  expect_that(svm.accuracy(svm.induction.e.pred, test$y) < 0.2 && abs(svm.accuracy(svm.induction.e.pred, test$y) - svm.accuracy(svm.induction.pred, test$y)) < 1e-2, is_true())
+
+  # Now reverse the labels again
+  train.induction$y <- as.factor(as.numeric(train.induction$y))
+  train.transduction$y <- as.numeric(train.transduction$y)
+  train.transduction$y[train.transduction$y == 1] = 0
+  train.transduction$y <- as.factor(train.transduction$y)
+  test$y <- (as.numeric(test$y))
+  test$y[test$y == 1] = 3
+  test$y[test$y == 2] = 1
+  test$y[test$y == 3] = 2
+  test$y <- as.factor(test$y)
+
+  svm.transduction <- SVM(x=train.transduction$x, y=train.transduction$y,
+                          core="svmlight",
+                          transductive.learning=TRUE)
+  svm.transduction.pred <- predict(svm.transduction, test$x)
+
+
+  svm.induction <- SVM(x=train.induction$x, y=train.induction$y, core="svmlight", verbosity=1)
+  svm.induction.pred <- predict(svm.induction, test$x[,1:9253])
+
+  # This is exactly what svm_learn and svm_classify return on this dataset.
+  # If this fails, we probably changed some defaults in SVMLight - which would probably be a bug
+  expect_that(abs(svm.accuracy(svm.induction.pred, test$y) - 0.8433) < 1e-2, is_true())
+  expect_that(abs(svm.accuracy(svm.transduction.pred, test$y) - 0.96) < 1e-2, is_true())
+
+  # Check that we pass posratio correctly
+  svm.transduction <- SVM(x=train.transduction$x, y=train.transduction$y,
+                          core="svmlight",
+                          transductive.learning=TRUE,
+                          transductive.posratio=0.001)
+  svm.transduction.pred <- predict(svm.transduction, test$x)
+
+  expect_that(abs(svm.accuracy(svm.transduction.pred, test$y) - 0.5) < 1e-2, is_true())
+})
+
+test_that("Transduction improves score", {
+  library(SparseM)
+  library(e1071)
+  library(caret)
+  library(gmum.r)
+  set.seed(777)
+
+  x <- read.matrix.csr(system.file("dev", "datasets", "svm", "dexter_train.data", mustWork=TRUE, package="gmum.r"))
+  y <- as.factor(unlist(read.table(system.file("dev", "datasets", "svm", "dexter_train.labels", mustWork=TRUE, package="gmum.r"))))
+
+  X <- SparseM::as.matrix(x)
+  Y <- as.numeric(y)
+
+  i <- createDataPartition(Y, times=1, p=0.8, list=FALSE)
+  # A lot of transductive labels to make sure the effect is visible
+  K = as.integer(0.9*length(i))
+
+  X.test <- X[-i,]
+  Y.test <- Y[-i]
+  X.train <- X[i,]
+  Y.train <- Y[i]
+  X.train.tr1 <- X[i,]
+  Y.train.tr1 <- Y[i]
+  trans1 <- sample(1:length(i), K, replace=FALSE)
+  trans2 <- sample(1:length(i), K, replace=FALSE)
+  Y.train.tr1[trans1] = 0
+
+  NmK <- length(i) - K
+
+
+  svm.1 <- SVM(X.train, Y.train, core="svmlight", kernel="linear")
+  svm.2a <- SVM(X.train.tr1, Y.train.tr1,
+                transductive.learning = TRUE, core="svmlight", kernel="linear", verbosity=1)
+  sample1 <- sample(1:nrow(X.train), NmK, replace=FALSE)
+  svm.3a <- SVM(X.train[sample1,], as.factor(Y.train[sample1]), core="svmlight", kernel="linear")
+
+
+  svm.1.acc <- svm.accuracy(predict(svm.1, X.test), Y.test)
+  svm.2a.acc <- svm.accuracy(predict(svm.2a, X.test), Y.test)
+  svm.3a.acc <- svm.accuracy(predict(svm.3a, X.test), Y.test)
+
+  expect_that(svm.1.acc > svm.2a.acc && svm.2a.acc > svm.3a.acc, is_true())
+
+})
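Both transductive tests mark unlabeled training examples by setting their label to 0 (`Y.train.tr1[trans1] = 0`), which matches SVMLight's transduction convention. A minimal, self-contained sketch of that call pattern on synthetic data follows; only the 0-means-unlabeled convention and the `transductive.learning=TRUE` switch are the point here, and the data and resulting accuracy are purely illustrative.

library(gmum.r)

# Hedged sketch: SVMLight-style transduction, where label 0 means
# "infer this example's label during training".
set.seed(1)
x <- rbind(matrix(rnorm(40, mean = -2), ncol = 2),
           matrix(rnorm(40, mean =  2), ncol = 2))
y <- factor(c(rep(-1, 20), rep(1, 20)))
y.partial <- as.numeric(as.character(y))
y.partial[sample(40, 30)] <- 0   # hide 30 of the 40 labels
model <- SVM(x, as.factor(y.partial), core = "svmlight",
             kernel = "linear", transductive.learning = TRUE)
print(svm.accuracy(predict(model, x), y))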
diff --git a/tests/testthat/twoe_external_test.R b/tests/testthat/twoe_external_test.R
new file mode 100644
index 00000000..28584d6b
--- /dev/null
+++ b/tests/testthat/twoe_external_test.R
@@ -0,0 +1,45 @@
+library(caret)
+library(gmum.r)
+
+K = 10
+
+data(svm_breast_cancer_dataset)
+
+ds = svm.breastcancer.dataset
+ds$X1 = as.factor(ds$X1)
+folds = createFolds(ds$X1, k=K)
+formula = X1 ~ .
+
+for (c in seq(-6,5,1) ){
+
+  mean_acc = 0
+  mean_twoe_acc = 0
+
+  for ( i in seq(1,K,1) ) {
+
+    train = ds[-folds[[i]],]
+    test = ds[folds[[i]],]
+
+    svm <- SVM(formula, train, core="libsvm", kernel="linear", prep = "none", C=10^c);
+    twoe_svm <- SVM(formula, train, core="libsvm", kernel="linear", prep = "2e", C=10^c);
+
+    print("----------")
+
+    test_x = subset(test, select = -c(X1))
+    target = test[,"X1"]
+
+    pred <- predict(svm, test_x)
+    twoe_pred <- predict(twoe_svm, test_x)
+
+    acc <- svm.accuracy(prediction=pred, target=target)
+    twoe_acc <- svm.accuracy(prediction=twoe_pred, target=target)
+
+    mean_acc = mean_acc + acc
+    mean_twoe_acc = mean_twoe_acc + twoe_acc
+
+  }
+
+  print(sprintf("mean SVM acc with C=%f after %i folds: %f", 10^c, K, mean_acc/K))
+  print(sprintf("mean 2eSVM acc with C=%f after %i folds: %f", 10^c, K, mean_twoe_acc/K))
+}
+
diff --git a/tests/testthat/xor.R b/tests/testthat/xor.R
new file mode 100644
index 00000000..91348dba
--- /dev/null
+++ b/tests/testthat/xor.R
@@ -0,0 +1,25 @@
+library(gmum.r)
+ds <- matrix(
+  c(0,1,0,1,0,0,1,1,0,1,1,0),
+  ncol=3,
+  nrow=4,
+  dimnames=list(c(),c("x","y","t")))
+x <- ds[,c(1,2)]
+y <- as.factor(ds[,3])
+
+library(SparseM)
+xs <- as(x, "matrix.csr")
+
+
+svm <- SVM(xs,y, core="libsvm", kernel="linear", gamma=1, verbosity=6)
+print("")
+print("libsvm sparse: Alphas, SVs")
+print(svm$alpha)
+print(svm$SV)
+predict(svm, xs)
+
+svm2 <- SVM(x,y, core="svmlight", kernel="linear", gamma=1)
+print("")
+print("svmlight dense: Alphas, SVs")
+print(svm2$alpha)
+print(svm2$SV)
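Like twoe_external_test.R, xor.R is a script rather than a testthat file: it prints alphas and SVs without comparing the two models it trains. A closing sketch that at least surfaces disagreement between the sparse libsvm fit and the dense svmlight fit; since xor is not linearly separable, the two cores may legitimately differ, so this reports rather than asserts.

# Hedged sketch: count prediction disagreements between the sparse (libsvm)
# and dense (svmlight) models trained above.
p1 <- predict(svm, xs)
p2 <- predict(svm2, x)
print(sprintf("sparse/dense prediction disagreements: %d",
              sum(as.character(p1) != as.character(p2))))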