DP-HLS
All Classes Namespaces Files Functions Variables Typedefs Macros Pages
pyapi.h
Go to the documentation of this file.
1 #ifndef PYAPI_H
2 #define PYAPI_H
3 
4 #include "./params.h"
5 #include <string>
6 #include <vector>
7 #include <unordered_map>
8 #include <type_traits>
9 #include "debug.h"
10 #include "utils.h"
11 #include <ap_int.h>
12 #include <ap_fixed.h>
13 #include <pybind11/pybind11.h>
14 #include <pybind11/stl.h>
15 using namespace std;
16 namespace py = pybind11;
17 
19 {
20 public:
 // One entry per loop invocation; each unordered_map is keyed by the
 // iteration index. tuple<> is empty here — presumably a placeholder,
 // since the typed timelines live in LoopLogger::timelines. TODO confirm.
 // NOTE(review): this class's declaration line (pyapi.h:18) was lost in
 // the page extraction — its name is not visible here.
21  list<unordered_map<int, tuple<>>> data;
22  // Normally the logger assumes each execution of the loop body is sequential. However, in HLS the
23  // loop may be unrolled onto multiple hardware instances; this flag marks that case.
24  bool parallel = false;
25 };
26 
27 
28 
30 {
31 public:
32  // There must be only one logger throughout the entire program and multiple files.
 // NOTE(review): a static member variable template is only *declared* here; it
 // still needs an out-of-class definition in exactly one TU — confirm one exists.
33  template <typename... Args>
34  static unordered_map<string, unordered_map<string, list<unordered_map<int, tuple<Args...>>>>> timelines;
35 
 // Pack the logged arguments into a tuple, stored verbatim (no type conversion yet).
36  template <typename... Args>
37  static auto logImpl(const Args &...args)
38  {
39  return std::make_tuple(args ...); // FIXME: Need recursive Cast
40  }
41 
 // Record one loop-iteration entry under timelines<Args...>[func][name].
 // @param func  enclosing function name (outer timeline key)
 // @param name  loop name (inner timeline key)
 // @param i     iteration index, used as the map key
 // @param init  true at the start of a new loop invocation: appends a fresh
 //              per-invocation map to the timeline list before logging
 // @param args  values to log for this iteration
54  template <typename... Args>
55  static void log(string func, string name, int i, bool init, const Args &...args)
56  {
57  if (init)
58  {
59  // start a new item representing a new call to the loop
60  static unordered_map<int, tuple<Args...>> new_ordered_map;
61  (LoopLogger::timelines<Args...>[func][name]).push_back(new_ordered_map);
62  }
63 
 // BUG FIX: bind by reference. The original `auto current_map = ...back();`
 // copied the map out of the list, so emplace() below mutated a temporary
 // and every logged tuple was silently discarded.
64  auto &current_map = LoopLogger::timelines<Args...>[func][name].back();
65  current_map.emplace(i, LoopLogger::logImpl(args...)); // A Tuple
66  }
67 
 // Return a copy of the full timeline structure for element types Args...
68  template<typename... Args>
69  unordered_map<string, unordered_map<string, list<unordered_map<int, tuple<Args...>> >>> get_timelines(){
70  return LoopLogger::timelines<Args...>;
71  }
72 };
73 
74 
75 class AHRunner
76 { // AlignHLS Runner
77 public:
 // Raw query/reference sequences this runner aligns.
78  string query, reference;
 // NOTE(review): the page extraction dropped hyperlinked lines here — per the
 // member index at the bottom of this page, pyapi.h:79 declares
 // `LoopLogger logger;` and pyapi.h:81 is the default ctor `AHRunner()`.
80 
 // Default ctor: empty sequences, fresh logger.
82  {
83  this->query = "";
84  this->reference = "";
85  this->logger = LoopLogger();
86  };
87 
 // Construct with the sequences to align.
88  AHRunner(string query_string, string reference_string)
89  {
90  this->query = query_string;
91  this->reference = reference_string;
92  this->logger = LoopLogger();
93  };
94 
95 
96 
 // Run the alignment using the sequences already stored on this runner.
 // py_penalties: scoring penalties passed in from Python as a dict.
102  void run(py::dict py_penalties);
103 
104 
 // Overload: set the sequences and run in one call.
111  void run(string query_string, string reference_string, py::dict py_penalties);
112 
113 
 // Traceback path per block, as vectors of direction characters.
120  std::vector<std::vector<char>> get_traceback_path();
121 
 // Scores converted to std floats: [block][query][reference][layer] — TODO
 // confirm the axis order against the `scores` member below.
128  std::vector<std::vector<std::vector<std::vector<float>>>> get_scores();
129 
130  // FIXME: Wanted to have scores snapshot, but it requires a 5 dimensional vector.
131  // This feature is not sure to implement yet.
132  // std::vector<std::vector<std::vector<std::vector
133 
134 private:
 // Copies of the input sequences (shadow the public members above — TODO
 // confirm which pair the implementation actually uses).
135  string query_string;
136  string reference_string;
137 
 // Traceback output buffer, one stream per block.
138  tbr_t tb_streams[N_BLOCKS][MAX_REFERENCE_LENGTH + MAX_QUERY_LENGTH];
139 
140  // FIXME: Scores could be represented by a simpler format.
141  hls::vector<type_t, N_LAYERS> scores[N_BLOCKS][MAX_QUERY_LENGTH][MAX_REFERENCE_LENGTH];
142 
 // Map a single base to a number (per the docs: A:0, C:1, G:2, T:3, _:4).
143  static char_t base_to_num(char base);
144 };
145 
146 // // >>> Multidim Cast with decltype >>>
147 
148 // // Specialization for non-vector types (terminal case)
149 // template <typename T>
150 // int CastToFloat(const T& val) {
151 // return val.to_float();
152 // }
153 
154 
155 // // Define CastNestedToInteger function for nested arrays or vectors
156 // template <typename T>
157 // auto CastToFloat(const T& input) -> decltype(input[0].to_float()) {
158 // using ElementT = decltype(input[0].to_float());
159 // std::vector<ElementT> result(input.size());
160 // for (size_t i = 0; i < input.size(); ++i) {
161 // result[i] = input[i].to_float();
162 // }
163 // return result;
164 // }
165 
166 // // Recursive template for multidimensional array
167 // template <typename T>
168 // auto CastMultiDimArrayToFloat(const T& input) -> decltype(CastToFloat(input[0])) {
169 // using ElementT = decltype(CastToFloat(input[0]));
170 // std::vector<ElementT> result(input.size());
171 // for (size_t i = 0; i < input.size(); ++i) {
172 // result[i] = CastToFloat(input[i]);
173 // }
174 // return result;
175 // }
176 
177 // // <<< Multidim Cast with decltype <<<
178 
179 // // Type traits
180 // template <typename>
181 // struct is_ap_uint_ : public std::false_type { };
182 // template <int N>
183 // struct is_ap_uint_<ap_uint<N>> : public std::true_type { };
184 // template <typename T>
185 // constexpr bool is_ap_uint (T const &) { return is_ap_uint_<T>::value; };
186 
187 // template <typename>
188 // struct is_ap_fixed_ : public std::false_type { };
189 // template <int M, int N>
190 // struct is_ap_fixed_<ap_fixed<M, N>> : public std::true_type { };
191 // template <typename T>
192 // constexpr bool is_ap_fixed (T const &) { return is_ap_fixed_<T>::value; };
193 
194 // template <typename>
195 // struct is_ap_int_ : public std::false_type { };
196 // template <int N>
197 // struct is_ap_int_<ap_int<N>> : public std::true_type { };
198 // template <typename T>
199 // constexpr bool is_ap_int (T const &) { return is_ap_int_<T>::value; };
200 
201 // template <typename T>
202 // bool is_hls_scalar(T val){
203 // return is_ap_uint(val) || is_ap_fixed(val) || is_ap_int(val);
204 // }
205 
206 // // The recursive function multidimensional casting iterate through all the element in that dimension
207 // // and call itself recurisvely. If it reaches the lowest dimension, it return. If not, initialize a vector and put the element
208 // // in the multidimensional cast into the vector.
209 // // Assume the HLS array are stored in contiguous memory since it uses primitive C++ array.
210 
211 
212 // // Helper function to check if an object is an instance of MyTemplate
213 // // Multidim cast for array
214 // template <typename THLS, typename TSTD, int N>
215 // std::vector<TSTD> MultidimCast(THLS *arr){
216 // std::vector<TSTD> result;
217 // if (is_hls_scalar(arr[0])){
218 // for (int i = 0; i < N; i++){
219 // result.push_back(arr[i].to_float());
220 // }
221 // }else {
222 // // If it's not a pointer, then it's a vector
223 // if (std::is_array(arr[0])){
224 // for (int i = 0; i < N; i++){
225 // result.push_back(MultidimCast(arr[i]));
226 // }
227 // }else {
228 // result = cast_hls_vector(arr);
229 // }
230 // }
231 // return result;
232 // }
233 
234 // template <typename T, int N>
235 // std::vector<float> cast_hls_vector(hls::vector<T, N> arr){
236 // std::vector<float> result;
237 // for (int i = 0; i < N; i++){
238 // result.push_back(arr[i].to_float());
239 // }
240 // return result;
241 // }
242 
243 #endif
static unordered_map< string, unordered_map< string, list< unordered_map< int, tuple< Args...> > > > > timelines
Definition: pyapi.h:34
Definition: pyapi.h:18
list< unordered_map< int, tuple<> > > data
Definition: pyapi.h:21
unordered_map< string, unordered_map< string, list< unordered_map< int, tuple< Args...> > > > > get_timelines()
Definition: pyapi.h:69
Definition: pyapi.h:75
Definition: pyapi.h:29
static void log(string func, string name, int i, bool init, const Args &...args)
Logger. All the data stored into this data structure should be std data type translated with the tran...
Definition: pyapi.h:55
AHRunner(string query_string, string reference_string)
Definition: pyapi.h:88
string reference
Definition: pyapi.h:78
static auto logImpl(const Args &...args)
Definition: pyapi.h:37
static char_t base_to_num(char base)
Map a single base to a number. A: 0, C: 1, G: 2, T: 3, _: 4.
LoopLogger logger
Definition: pyapi.h:79
AHRunner()
Definition: pyapi.h:81
ap_uint< 3 > tbr_t
Definition: dp_hls_common.h:94