# frozen_string_literal: true
#
# Wisen multi-project fetcher.
#
# Polls the Wisen cloud API (https://un-project.wisencn.com) for several
# monitoring projects and appends the latest reading of each configured node
# to a per-project JSON file, kept as a ring buffer of MAX_POINTS samples.

$stdout.sync = true
$stderr.sync = true

require 'net/http'
require 'uri'
require 'json'
require 'fileutils'
require 'openssl'

BASE_URL = 'un-project.wisencn.com'
# Credentials may be overridden via the environment; the hard-coded fallbacks
# preserve the previous behavior. NOTE(review): consider removing the
# fallbacks so secrets never live in source control.
USERNAME = ENV.fetch('WISEN_USERNAME', 'Geo_Spain')
PASSWORD = ENV.fetch('WISEN_PASSWORD', 'wisenmeshnet')
UPDATE_INTERVAL = 60   # seconds between polling rounds
MAX_POINTS = 1440      # 24 hours at 1 point/min
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36'

# Determine output directory: write next to this script when it already lives
# in src/, otherwise into a src/ subdirectory.
BASE_PATH = File.basename(__dir__) == 'src' ? __dir__ : File.join(__dir__, 'src')

# NOTE(review): the previous global monkey-patch that replaced
# OpenSSL::SSL::VERIFY_PEER with VERIFY_NONE was removed — it mutated
# process-wide SSL behavior, and every connection this script opens already
# disables peer verification explicitly in `build_http`.

PROJECTS = {
  'pernet' => {
    'id' => '4595',
    'type' => '4109',
    'nodes' => %w[49849 49857 49854 50463 49853 49850 49848 50093 49855 49852],
    'output' => File.join(BASE_PATH, 'pernet_data.json')
  },
  'cartagena' => {
    'id' => '4480',
    'type' => '4109', # Assuming same type as Pernet
    'nodes' => %w[47633 48615 48613 48610 48612 48007 48609 48614 48713 48611 48714],
    'output' => File.join(BASE_PATH, 'cartagena_data.json')
  },
  'cartagena_tilts' => {
    'id' => '4622',
    'type' => '4109',
    'nodes' => %w[18563D90 18714FAD 18714F48 18714FF0 187150E9 187151C0],
    'output' => File.join(BASE_PATH, 'cartagena_tilts.json')
  }
}.freeze

# Build an HTTPS client for the Wisen host with modest timeouts.
# SSL peer verification is intentionally disabled: the server's certificate
# does not validate in this environment.
def build_http
  http = Net::HTTP.new(BASE_URL, 443)
  http.use_ssl = true
  http.verify_mode = OpenSSL::SSL::VERIFY_NONE
  http.open_timeout = 20
  http.read_timeout = 25
  http
end

# Merge cookies from `existing` (a "k=v; k2=v2" string) with cookies taken
# from `response` — either a Net::HTTPResponse (its Set-Cookie headers) or a
# raw "k=v; k2=v2" string. Later values win on key collisions.
# Returns the combined "k=v; ..." cookie string.
def merge_cookies(existing, response)
  incoming =
    if response.respond_to?(:get_fields)
      (response.get_fields('set-cookie') || []).map { |c| c.split(';').first }
    elsif response.is_a?(String)
      response.split(';').map(&:strip)
    else
      []
    end

  merged = {}
  (existing.to_s.split('; ').reject(&:empty?) + incoming).each do |pair|
    key, value = pair.split('=', 2)
    merged[key.strip] = value.to_s if key
  end
  merged.map { |k, v| "#{k}=#{v}" }.join('; ')
end

# Perform the two-step login observed in the browser:
#   1. GET the login page to obtain the initial session cookies.
#   2. POST the credentials as an XHR form submission.
# Returns { 'cookies' => <cookie string>, 'log_id' => <userLogId> } on
# success, nil on any failure (logged to stdout).
def full_login_session(http)
  # 1. Get initial cookies from login page
  req0 = Net::HTTP::Get.new('/control/login.html')
  req0['User-Agent'] = USER_AGENT
  res0 = http.request(req0)
  cookies = merge_cookies('', res0)

  # 2. Login
  req = Net::HTTP::Post.new('/control/login/login.action')
  req.set_form_data(
    'userIP.user.username' => USERNAME,
    'userIP.user.password' => PASSWORD
  )
  req['User-Agent'] = USER_AGENT
  req['Content-Type'] = 'application/x-www-form-urlencoded'
  req['Referer'] = "https://#{BASE_URL}/control/login.html"
  req['Cookie'] = cookies
  req['Accept'] = 'application/json, text/javascript, */*; q=0.01'
  req['X-Requested-With'] = 'XMLHttpRequest'
  r1 = http.request(req)
  body = r1.body.to_s

  begin
    json = JSON.parse(body)
    if json['username'] == USERNAME
      log_id = json['userLogId']
      c = merge_cookies(cookies, r1)
      # Add explicit cookies observed in browser.
      # NOTE(review): wisenUserId=1750 is hard-coded — confirm it matches this
      # account, or derive it from the login response instead.
      c = merge_cookies(c, "wisenLogId=#{log_id}; wisenUserName=#{USERNAME}; wisenUserId=1750")
      return { 'cookies' => c, 'log_id' => log_id }
    end
  rescue => e
    puts "[ERROR] login parse: #{e.message}"
  end
  nil
rescue => e
  puts "[ERROR] login session: #{e.message}"
  nil
end

# POST to the integratedInfos endpoint for one project and parse the JSON
# body. Returns the parsed Hash, or nil when the session has expired
# ('logout'), the project is unknown ('np'), the response is an HTTP error,
# or the body cannot be parsed.
def fetch_integrated(http, cookies, project_id)
  # The backend also reads the active project from the cookie jar.
  full_cookies = merge_cookies(cookies, "projectId=#{project_id}")

  req = Net::HTTP::Post.new('/web_ext/nodeData/integratedInfos.action')
  req.set_form_data('proId' => project_id)
  req['Cookie'] = full_cookies
  req['User-Agent'] = USER_AGENT
  req['X-Requested-With'] = 'XMLHttpRequest'
  req['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
  req['Referer'] = "https://#{BASE_URL}/web_ext/integratedInfo.html?proId=#{project_id}"
  req['Accept'] = 'application/json, text/plain, */*'
  req['Origin'] = "https://#{BASE_URL}"

  res = http.request(req)
  body = res.body.to_s.strip

  # 'logout' and 'np' are the server's plain-text sentinels for an expired
  # session and a missing project respectively.
  return nil if body.empty? || body == 'logout' || body == 'np'
  return nil if res.code.to_i >= 300

  JSON.parse(body)
rescue JSON::ParserError => e
  puts "[ERROR] JSON: #{e.message.slice(0, 80)}"
  nil
rescue => e
  puts "[ERROR] fetch: #{e.message}"
  nil
end

# Extract a reading from a comma-separated dataAndNetInfo payload.
# Field `index` (default 14, the measurement slot) is parsed as a float and
# rounded to 4 decimals; nil/empty payloads and zero readings map to 0.0.
def extract_value(data_str, index = 14)
  return 0.0 if data_str.nil? || data_str.empty?

  v = data_str.split(',')[index]&.to_f
  v && v != 0.0 ? v.round(4) : 0.0
end

# Read the existing point list from `file`; returns [] when the file is
# missing or its JSON is corrupt (best-effort, never raises).
def load_existing(file)
  return [] unless File.exist?(file)

  JSON.parse(File.read(file))
rescue
  []
end

# Persist the point list as compact JSON, creating the directory if needed.
def save_data(file, list)
  FileUtils.mkdir_p(File.dirname(file))
  File.write(file, JSON.generate(list))
end

# ---- Main ----
puts "[START] Wisen multi-project fetcher"
PROJECTS.each_value { |p| FileUtils.mkdir_p(File.dirname(p['output'])) }

session = nil
http = nil
cookies = nil

loop do
  begin
    http ||= build_http

    if session.nil?
      puts "[INFO] Logging in..."
      session = full_login_session(http)
      if session
        puts "[OK] Session ready (LogID: #{session['log_id']})"
        cookies = session['cookies']
      else
        puts "[ERROR] Auth failed, retry in 30s"
        sleep 30
        http = nil
        next
      end
    end

    PROJECTS.each do |name, config|
      # Visit the project page first so the server-side session "activates"
      # the project before the data endpoint is queried.
      req_p = Net::HTTP::Get.new("/web_ext/integratedInfo.html?proId=#{config['id']}")
      req_p['Cookie'] = merge_cookies(cookies, "projectId=#{config['id']}")
      req_p['User-Agent'] = USER_AGENT
      res_p = http.request(req_p)
      # Keep accumulating cookies across requests, as a browser would.
      cookies = merge_cookies(cookies, res_p)
      project_cookies = merge_cookies(cookies, "projectId=#{config['id']}")

      raw = fetch_integrated(http, project_cookies, config['id'])
      if raw.nil?
        puts "[WARN] [#{name}] No se pudo obtener datos de Wisen (API/Auth error) a las #{Time.now.strftime('%H:%M')}"
        # BUGFIX: was `next`, which kept iterating with `http` set to nil and
        # crashed on the following project; `break` ends this round so the
        # outer loop performs a clean re-login.
        session = nil
        http = nil
        break
      end

      node_list = raw[config['type']] || []
      # Fall back to every array in the payload when the expected type key
      # is absent.
      node_list = raw.values.select { |v| v.is_a?(Array) }.flatten if node_list.empty?

      # Index readings by both nodeId and MAC so config['nodes'] can use either.
      val_map = {}
      node_list.each do |entry|
        nid = entry['nodeId'].to_s
        mac = entry['dataAndNetInfo'].to_s.split(',')[0]
        if name == 'cartagena_tilts' && entry['expData'] && !entry['expData'].empty?
          # Tilt sensors publish their reading in expData field 0 instead.
          val = entry['expData'].to_s.split(',')[0].to_f.round(4)
        else
          val = extract_value(entry['dataAndNetInfo'].to_s)
        end
        val_map[nid] = val
        val_map[mac] = val if mac && !mac.empty?
      end

      values = config['nodes'].map { |nid| val_map[nid] || 0.0 }

      # Prefer the device timestamp (epoch ms, payload field 2); fall back to
      # "now" when it is absent or clearly not epoch-milliseconds.
      ts_ms = (Time.now.to_f * 1000).to_i
      if node_list.first
        parts = node_list.first['dataAndNetInfo'].to_s.split(',')
        ts = parts[2]&.to_i
        ts_ms = ts if ts && ts > 1_000_000_000_000
      end

      # Flag readings older than one hour as stale.
      is_stale = (Time.now.to_i - ts_ms / 1000) > 3600

      point = { 'time' => ts_ms, 'cells' => values }
      list = load_existing(config['output'])

      if list.last && list.last['time'] == ts_ms
        stale_status = is_stale ? " (DATO ANTIGUO EN WISEN)" : ""
        puts "[INFO] [#{name}] Sin datos nuevos en Wisen#{stale_status}. Último: #{Time.at(ts_ms / 1000).strftime('%H:%M')}"
      else
        list << point
        # BUGFIX: trim everything beyond MAX_POINTS, not just one element, so
        # an oversized existing file shrinks back to the ring-buffer size.
        list.shift while list.size > MAX_POINTS
        save_data(config['output'], list)
        puts "[OK] [#{name}] Nueva lectura: #{Time.at(ts_ms / 1000).strftime('%H:%M')} — #{values.first(3).map { |v| '%.2f' % v }.join(', ')}..."
      end
    end
  rescue => e
    puts "[FATAL] #{e.class}: #{e.message}"
    # BUGFIX: also drop the session here — previously only the cookies were
    # cleared, so the next round skipped login and ran with an empty jar.
    session = nil
    cookies = nil
    http = nil
  end

  sleep UPDATE_INTERVAL
end