Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/CMakeLists.txt
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/CMakeLists.txt
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/CMakeLists.txt
@@ -1,3 +1,4 @@
 add_subdirectory(hotspot)
 add_subdirectory(srad)
+add_subdirectory(backprop)
 add_subdirectory(pathfinder)
Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/CMakeLists.txt
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/CMakeLists.txt
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/CMakeLists.txt
@@ -0,0 +1,9 @@
+set(PROG backprop)
+list(APPEND CFLAGS -I${CMAKE_CURRENT_SOURCE_DIR}/../Common)
+list(APPEND LDFLAGS -lm)
+set(FP_ABSTOLERANCE 0.00001)
+llvm_multisource(backprop
+  main.c
+  backpropKernel.c
+  ../Common/glibc_compat_rand.c
+)
Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.h
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.h
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.h
@@ -0,0 +1,17 @@
+#ifndef _BACKPROP_H_
+#define _BACKPROP_H_
+
+#define ETA 0.3
+#define MOMENTUM 0.3
+
+#define LAYERSIZE 50000
+#define HIDDEN_SIZE 16
+#define OUTPUT_SIZE 1
+
+// GAP Between array index, which we want to compare
+// Otherwise very big reference output
+#define GAP 1000
+#define ITERATIONS 100
+#define SEED 7
+
+#endif
Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.reference_output
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.reference_output
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backprop.reference_output
@@ -0,0 +1,854 @@
+0.486904
+0.169460
+0.164359
+0.166336
+0.649690
+0.861499
+0.832199
+0.094743
+0.119128 +0.816543 +0.205882 +0.592891 +0.087559 +0.796062 +0.744477 +0.687136 +0.409324 +0.229926 +0.179635 +0.877838 +0.539623 +0.312723 +0.861203 +0.808536 +0.177309 +0.809247 +0.754710 +0.067780 +0.146609 +0.361246 +0.115128 +0.781544 +0.157673 +0.857635 +0.237355 +0.650418 +0.237758 +0.743666 +0.468748 +0.529723 +0.215633 +0.626604 +0.969903 +0.314463 +0.316265 +0.306560 +0.586213 +0.701777 +0.308720 +0.065967 +0.475705 +0.006781 +0.894918 +0.082432 +0.474321 +0.618719 +0.717301 +0.145195 +0.474082 +0.064673 +0.995587 +0.587960 +0.725732 +0.449825 +0.348487 +0.496353 +0.056022 +0.733142 +0.632272 +0.586970 +0.734965 +0.266502 +0.567189 +0.463670 +0.247176 +0.048959 +0.309932 +0.329291 +0.552086 +0.578002 +0.010839 +0.378603 +0.787619 +0.707582 +0.517028 +0.903286 +0.761052 +0.037288 +0.340989 +0.500029 +0.706227 +0.400263 +0.604100 +0.587762 +0.035494 +0.395687 +0.742392 +0.936557 +0.157238 +0.091819 +0.306959 +0.522987 +0.158246 +0.451586 +0.341405 +0.153599 +0.746181 +0.965518 +0.868914 +0.253051 +0.531270 +0.187243 +0.167433 +0.025339 +0.290073 +0.271832 +0.257672 +0.107266 +0.739753 +0.561513 +0.344770 +0.427377 +0.721956 +0.824100 +0.606198 +0.091298 +0.518672 +0.398766 +0.201572 +0.861439 +0.125994 +0.021869 +0.164385 +0.696674 +0.137864 +0.881026 +0.781743 +0.496318 +0.590933 +0.393627 +0.687981 +0.249277 +0.205042 +0.084858 +0.658598 +0.945728 +0.792871 +0.810222 +0.334534 +0.509398 +0.948633 +0.013944 +0.343790 +0.495413 +0.388143 +0.312615 +0.268426 +0.269383 +0.488265 +0.731685 +0.021148 +0.314089 +0.095228 +0.211388 +0.479143 +0.083758 +0.054526 +0.472355 +0.966395 +0.405499 +0.774144 +0.191788 +0.650573 +0.825028 +0.510799 +0.813576 +0.840174 +0.728239 +0.866810 +0.013991 +0.691535 +0.936661 +0.214873 +0.680678 +0.969755 +0.703441 +0.657556 +0.749357 +0.476680 +0.833719 +0.303190 +0.401366 +0.152698 +0.766773 +0.913836 +0.179406 +0.679888 +0.656515 +0.478503 +0.902813 +0.317965 +0.089955 +0.917405 +0.473295 +0.297531 +0.379192 +0.395923 +0.914856 
+0.152049 +0.256836 +0.572860 +0.413123 +0.283900 +0.546914 +0.762867 +0.078366 +0.252134 +0.389397 +0.713406 +0.059313 +0.047755 +0.401767 +0.414904 +0.550295 +0.280434 +0.637486 +0.570443 +0.559826 +0.824632 +0.962578 +0.876549 +0.229606 +0.879848 +0.256530 +0.546549 +0.256880 +0.429799 +0.329948 +0.146506 +0.628658 +0.373861 +0.178370 +0.188486 +0.282002 +0.850437 +0.227461 +0.577096 +0.976803 +0.461388 +0.867883 +0.176248 +0.406663 +0.188396 +0.618732 +0.540315 +0.800248 +0.302276 +0.299371 +0.609532 +0.548765 +0.876263 +0.011296 +0.896179 +0.934407 +0.133758 +0.192955 +0.569836 +0.811829 +0.748895 +0.099560 +0.915284 +0.851207 +0.252244 +0.720756 +0.094707 +0.897521 +0.938609 +0.354180 +0.832194 +0.035654 +0.132694 +0.721929 +0.402036 +0.669377 +0.322254 +0.929812 +0.551876 +0.219777 +0.975957 +0.712079 +0.918480 +0.938536 +0.150169 +0.252420 +0.945808 +0.841902 +0.215112 +0.296069 +0.239179 +0.651664 +0.547891 +0.327145 +0.032217 +0.871813 +0.706527 +0.739539 +0.001742 +0.466369 +0.993264 +0.300265 +0.146795 +0.500471 +0.181720 +0.402686 +0.435536 +0.361496 +0.976999 +0.202771 +0.250412 +0.099853 +0.614044 +0.497282 +0.597065 +0.397432 +0.926044 +0.175141 +0.451027 +0.159222 +0.795392 +0.432297 +0.323300 +0.151536 +0.538906 +0.489358 +0.122051 +0.577818 +0.427214 +0.351494 +0.911522 +0.610660 +0.412886 +0.948257 +0.565660 +0.945980 +0.163688 +0.384139 +0.813094 +0.588959 +0.195437 +0.643436 +0.967633 +0.062374 +0.600315 +0.297019 +0.791664 +0.220852 +0.100434 +0.346943 +0.911014 +0.252738 +0.059306 +0.618800 +0.495962 +0.124256 +0.966807 +0.089358 +0.762168 +0.124338 +0.678421 +0.100658 +0.523136 +0.567417 +0.086273 +0.632464 +0.389868 +0.017045 +0.457628 +0.524459 +0.499432 +0.867170 +0.158847 +0.138112 +0.351105 +0.514350 +0.655964 +0.509503 +0.365076 +0.593608 +0.920620 +0.857922 +0.274400 +0.088859 +0.834528 +0.553684 +0.258293 +0.763792 +0.455942 +0.073403 +0.215271 +0.797282 +0.353709 +0.983348 +0.127693 +0.092793 +0.861815 +0.660237 +0.302530 +0.703228 
+0.531274 +0.976114 +0.328835 +0.793327 +0.681646 +0.014869 +0.660660 +0.774493 +0.616683 +0.389345 +0.238596 +0.636573 +0.508932 +0.912056 +0.016349 +0.112609 +0.183424 +0.463693 +0.434572 +0.023890 +0.914148 +0.272437 +0.622207 +0.542648 +0.946441 +0.745609 +0.080281 +0.634881 +0.680978 +0.685859 +0.238287 +0.544715 +0.650107 +0.276991 +0.370903 +0.755879 +0.392838 +0.011766 +0.937425 +0.959580 +0.462270 +0.548676 +0.654539 +0.453542 +0.705758 +0.261194 +0.862915 +0.198448 +0.168872 +0.088258 +0.014719 +0.073151 +0.146102 +0.502719 +0.244926 +0.076047 +0.292450 +0.564880 +0.172216 +0.282370 +0.259348 +0.881608 +0.139102 +0.442766 +0.462773 +0.979178 +0.737619 +0.834233 +0.756503 +0.541338 +0.223870 +0.647274 +0.539306 +0.250928 +0.063235 +0.193503 +0.762062 +0.072313 +0.546151 +0.770769 +0.734662 +0.041416 +0.216761 +0.872500 +0.018154 +0.154102 +0.633906 +0.630096 +0.090650 +0.948673 +0.644527 +0.081791 +0.823531 +0.710793 +0.017646 +0.837318 +0.477669 +0.617423 +0.463870 +0.430453 +0.102915 +0.944678 +0.529091 +0.219543 +0.501098 +0.450297 +0.460249 +0.151804 +0.154258 +0.636663 +0.560290 +0.373488 +0.518024 +0.935917 +0.355258 +0.366451 +0.662374 +0.963281 +0.345274 +0.085744 +0.450522 +0.944623 +0.497636 +0.236404 +0.261338 +0.266521 +0.161144 +0.135555 +0.877839 +0.277124 +0.733300 +0.985223 +0.705867 +0.943236 +0.824726 +0.796957 +0.038520 +0.655725 +0.808786 +0.645583 +0.360199 +0.805484 +0.785994 +0.442733 +0.223750 +0.690389 +0.841780 +0.334358 +0.522339 +0.999853 +0.401341 +0.102672 +0.944733 +0.253097 +0.764226 +0.579684 +0.998233 +0.045584 +0.190237 +0.603528 +0.454818 +0.200226 +0.245697 +0.631481 +0.440533 +0.705761 +0.508100 +0.699835 +0.718551 +0.959472 +0.060885 +0.740501 +0.023952 +0.312280 +0.938897 +0.529176 +0.139696 +0.007394 +0.037149 +0.370979 +0.079857 +0.356522 +0.170617 +0.768632 +0.892857 +0.200543 +0.594937 +0.236659 +0.110761 +0.772857 +0.089150 +0.370752 +0.579609 +0.160998 +0.627006 +0.480205 +0.363870 +0.709307 +0.857296 +0.538654 
+0.608966 +0.008823 +0.805880 +0.855159 +0.901079 +0.436995 +0.329614 +0.995158 +0.133296 +0.822028 +0.080619 +0.230430 +0.260661 +0.091957 +0.987801 +0.821176 +0.007441 +0.085292 +0.025058 +0.668865 +0.286767 +0.281554 +0.488178 +0.746536 +0.234882 +0.371152 +0.343144 +0.782542 +0.966381 +0.910424 +0.354012 +0.739842 +0.892076 +0.910463 +0.293241 +0.098992 +0.863976 +0.200278 +0.388794 +0.830459 +0.140099 +0.948659 +0.801992 +0.495777 +0.689037 +0.068997 +0.191340 +0.315491 +0.906900 +0.391024 +0.548285 +0.963825 +0.316247 +0.388872 +0.081584 +0.168269 +0.716728 +0.271434 +0.402498 +0.655245 +0.621597 +0.150448 +0.720021 +0.758401 +0.814052 +0.724206 +0.278217 +0.005596 +0.384313 +0.693170 +0.772095 +0.329117 +0.740499 +0.598176 +0.509378 +0.772531 +0.810346 +0.371972 +0.643491 +0.999270 +0.683761 +0.403973 +0.255705 +0.337149 +0.515590 +0.780687 +0.233481 +0.525191 +0.917882 +0.379095 +0.033680 +0.884923 +0.121695 +0.460729 +0.295708 +0.338587 +0.185563 +0.171409 +0.996192 +0.744787 +0.590434 +0.874650 +0.683373 +0.326949 +0.155498 +0.453853 +0.052286 +0.887284 +0.834294 +0.770169 +0.928907 +0.911002 +0.362302 +0.770312 +0.951780 +0.425063 +0.469295 +0.243280 +0.567083 +0.764251 +0.412832 +0.549176 +0.381205 +0.571948 +0.493807 +0.672663 +0.672205 +0.672735 +0.491061 +0.085949 +0.908478 +0.831180 +0.583019 +0.439981 +0.722353 +0.528234 +0.302612 +0.960280 +0.182866 +0.613355 +0.205029 +0.785030 +0.103700 +0.575222 +0.188871 +0.093692 +0.215182 +0.532926 +0.358267 +0.388443 +0.042054 +0.871617 +0.020745 +0.831793 +0.280047 +0.647799 +0.056769 +0.953869 +0.089522 +0.820935 +0.958010 +0.055616 +0.511760 +0.799711 +0.446693 +0.134260 +0.856146 +0.469406 +0.528315 +0.685068 +0.026576 +0.721143 +0.912583 +0.291453 +0.045920 +0.826373 +0.573744 +0.750366 +0.677608 +0.845253 +0.924185 +0.740364 +0.081345 +0.406642 +0.121412 +0.775286 +0.545259 +0.163650 +0.686200 +0.575181 +0.675886 +0.462442 +0.548769 +0.715812 +0.913197 +0.820039 +0.114864 +0.353937 +0.547880 +0.094308 
+0.954717
+0.458342
+0.474745
+0.121222
+0.359985
+0.544749
+0.090595
+0.288283
+0.137531
+0.485125
+0.665258
+0.873964
+0.244343
+0.842194
+0.984668
+0.034523
+0.592971
+0.486010
+0.380907
+0.386120
+0.354270
+0.860691
+0.602308
+0.932447
+0.387703
+0.578253
+0.347608
+0.200793
+0.251227
+0.055266
+0.644498
+0.799907
+0.522716
+0.676829
+0.906607
+0.217712
+0.464576
+0.244890
+0.093321
+0.112632
+0.070390
+0.774974
+0.689898
+0.304370
+0.000100
+exit 0
Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backpropKernel.c
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backpropKernel.c
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/backpropKernel.c
@@ -0,0 +1,84 @@
+/*
+ ******************************************************************
+ * HISTORY
+ * 15-Oct-94 Jeff Shufelt (js), Carnegie Mellon University
+ * Prepared for 15-681, Fall 1994.
+ * Modified by Shuai Che
+ * 28-May-2018: Modified by Pankaj Kukreja,
+ * Indian Institute of Technology Hyderabad, India
+ ******************************************************************
+ */
+#include "backprop.h"
+#include <math.h>
+#include <stdio.h>
+
+float bpnn_train_kernel(
+    int in, int hid, int out, float input_units[static const restrict in + 1],
+    float hidden_units[static const restrict hid + 1],
+    float output_units[static const restrict out + 1],
+    float hidden_delta[static const restrict hid + 1],
+    float output_delta[static const restrict out + 1],
+    float target[static const restrict out + 1],
+    float input_weights[static const restrict in + 1][1 + hid],
+    float hidden_weights[static const restrict hid + 1][out + 1],
+    float input_prev_weights[static const restrict in + 1][hid + 1],
+    float hidden_prev_weights[static const restrict hid + 1][out + 1],
+    int iterations) {
+
+  float output_error = 0.0;
+
+  for (int iteration = 0; iteration < iterations; iteration++) {
+    double sum;
+    int j, k;
+
+    input_units[0] = 1.0;
+
+    for (j = 1; j <= hid; j++) {
+      sum = 0.0;
+      for (k = 0; k <= in; k++) {
+        sum += input_weights[k][j] * input_units[k];
+      }
+      hidden_units[j] = (1.0 / (1.0 + exp(-1 * sum)));
+    }
+
+    hidden_units[0] = 1.0;
+    for (j = 1; j <= out; j++) {
+      sum = 0.0;
+      for (k = 0; k <= hid; k++) {
+        sum += hidden_weights[k][j] * hidden_units[k];
+      }
+      output_units[j] = (1.0 / (1.0 + exp(-sum)));
+    }
+
+    float o, t;
+    output_error = 0.0;
+    for (j = 1; j <= out; j++) {
+      o = output_units[j];
+      t = target[j];
+      output_delta[j] = o * (1.0 - o) * (t - o);
+      output_error += fabs(output_delta[j]);
+    }
+
+    float new_dw;
+    hidden_units[0] = 1.0;
+    for (j = 1; j <= out; j++) {
+      for (k = 0; k <= hid; k++) {
+        new_dw = ((ETA * output_delta[j] * hidden_units[k]) +
+                  (MOMENTUM * hidden_prev_weights[k][j]));
+        hidden_weights[k][j] += new_dw;
+        hidden_prev_weights[k][j] = new_dw;
+      }
+    }
+
+    input_units[0] = 1.0;
+    for (j = 1; j <= hid; j++) {
+      for (k = 0; k <= in; k++) {
+        new_dw = ((ETA * hidden_delta[j] * input_units[k]) +
+                  (MOMENTUM * input_prev_weights[k][j]));
+        input_weights[k][j] += new_dw;
+        input_prev_weights[k][j] = new_dw;
+      }
+    }
+  }
+  return (output_error);
+  printf("%f\n", output_error);
+}
Index: test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/main.c
===================================================================
--- test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/main.c
+++ test-suite/trunk/MultiSource/Benchmarks/Rodinia/backprop/main.c
@@ -0,0 +1,125 @@
+#include "backprop.h"
+#include "glibc_compat_rand.h"
+#include <limits.h> // INT_MAX
+#include <stdio.h>  // printf
+#include <stdlib.h> // malloc, free
+float bpnn_train_kernel(int in, int hid, int out, float *input_units,
+                        float *hidden_units, float *output_units,
+                        float *hidden_delta, float *output_delta, float *target,
+                        float *input_weights, float *hidden_weights,
+                        float *input_prev_weights, float *hidden_prev_weights,
+                        int iterations);
+
+void bpnn_dump(int in, int hid, int out, float *input_weights,
+               float *hidden_weights, float error);
+
+////////////////////////////////////////////////////////////////////////////////
+// Program main
+////////////////////////////////////////////////////////////////////////////////
+
+int main() {
+  glibc_compat_srand(SEED);
+  float *input_weights =
+      malloc(sizeof(float) * (1 + LAYERSIZE) * (1 + HIDDEN_SIZE));
+  float *input_prev_weights =
+      malloc(sizeof(float) * (1 + LAYERSIZE) * (1 + HIDDEN_SIZE));
+  float *hidden_weights =
+      malloc(sizeof(float) * (1 + HIDDEN_SIZE) * (1 + OUTPUT_SIZE));
+  float *hidden_prev_weights =
+      malloc(sizeof(float) * (1 + HIDDEN_SIZE) * (1 + OUTPUT_SIZE));
+  float *input_units = malloc(sizeof(float) * (LAYERSIZE + 1));
+  float *hidden_units = malloc(sizeof(float) * (HIDDEN_SIZE + 1));
+  float *hidden_delta = malloc(sizeof(float) * (HIDDEN_SIZE + 1));
+  float *output_units = malloc(sizeof(float) * (OUTPUT_SIZE + 1));
+  float *output_delta = malloc(sizeof(float) * (OUTPUT_SIZE + 1));
+  float *target = malloc(sizeof(float) * (OUTPUT_SIZE + 1));
+
+  // Initializing input and hidden weights to random
+  int i, j;
+  for (i = 0; i <= LAYERSIZE; i++) {
+    for (j = 0; j <= HIDDEN_SIZE; j++) {
+      input_weights[i * (1 + HIDDEN_SIZE) + j] =
+          (float)glibc_compat_rand() / INT_MAX;
+    }
+  }
+
+  for (i = 0; i <= HIDDEN_SIZE; i++) {
+    for (j = 0; j <= OUTPUT_SIZE; j++) {
+      hidden_weights[i * (1 + OUTPUT_SIZE) + j] =
+          (float)glibc_compat_rand() / INT_MAX;
+    }
+  }
+
+  for (i = 0; i <= LAYERSIZE; i++) {
+    for (j = 0; j <= HIDDEN_SIZE; j++) {
+      input_prev_weights[i * (1 + HIDDEN_SIZE) + j] = 0.0;
+    }
+  }
+
+  for (i = 0; i <= HIDDEN_SIZE; i++) {
+    for (j = 0; j <= OUTPUT_SIZE; j++) {
+      hidden_prev_weights[i * (1 + OUTPUT_SIZE) + j] = 0.0;
+    }
+  }
+
+  for (i = 0; i <= OUTPUT_SIZE; i++) {
+    target[i] = 0.1;
+    output_units[i] = 0.0;
+    output_delta[i] = 0.0;
+  }
+
+  // Random input
+  input_units[0] = 0;
+  for (int i = 0; i <= LAYERSIZE; i++) {
+    input_units[i] = (float)glibc_compat_rand() / INT_MAX;
+  }
+
+  for (i = 0; i <= HIDDEN_SIZE; i++) {
+    hidden_units[i] = 0.0;
+    hidden_delta[i] = 0.0;
+  }
+
+  float error = 0.0;
+  // entering the training kernel
+  error = bpnn_train_kernel(
+      LAYERSIZE, HIDDEN_SIZE, OUTPUT_SIZE, input_units, hidden_units,
+      output_units, hidden_delta, output_delta, target, input_weights,
+      hidden_weights, input_prev_weights, hidden_prev_weights, ITERATIONS);
+
+  bpnn_dump(LAYERSIZE, HIDDEN_SIZE, OUTPUT_SIZE, input_weights, hidden_weights,
+            error);
+
+  free(input_weights);
+  free(hidden_weights);
+  free(input_prev_weights);
+  free(hidden_prev_weights);
+  free(input_units);
+  free(hidden_units);
+  free(output_units);
+  free(hidden_delta);
+  free(output_delta);
+  free(target);
+  return 0;
+}
+
+void bpnn_dump(int in, int hid, int out, float *input_weights,
+               float *hidden_weights, float error) {
+  int i, j;
+
+  fflush(stdout);
+  for (i = 0; i <= in; i++) {
+    for (j = 0; j <= hid; j++) {
+      if ((i * (1 + hid) + j) % GAP == 0) {
+        printf("%.6f\n", input_weights[i * (1 + hid) + j]);
+      }
+    }
+  }
+  for (i = 0; i <= hid; i++) {
+    for (j = 0; j <= out; j++) {
+      if ((i * (out + 1) + j) % GAP == 0) {
+        printf("%.6f\n", hidden_weights[i * (1 + out) + j]);
+      }
+    }
+  }
+  printf("%f\n", error);
+}