index.html 8.5 KB

<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/human/dist/human.js"></script>
<script src="https://code.jquery.com/jquery-3.7.0.min.js"></script>
</head>
<body>
<div id="data-container"></div>
{# <form id="data-form" action="/send_data" method="post"> #}
{# <input type="hidden" name="data" id="data-input"> #}
{# <input type="submit" value="Send data"> #}
<canvas id="canvas" style="margin: 0 auto; width: 100%"></canvas>
<pre id="log" style="padding: 8px; position: fixed; bottom: 0"></pre>
<script>
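// This page runs @vladmandic/human face detection on the local webcam, draws the
// results onto the canvas, periodically posts the latest detection data to the
// backend, and polls for generated audio responses to play back.
// The backend endpoints are assumed to behave as follows (inferred from how they
// are called below; the server code is not shown here):
//   GET  /check_audio  -> { newAudio, filename, text, prompt }
//   POST /send_data    -> accepts { data: <serialized detection result>, state: <need_generation flag> }
//   POST /delete_audio -> accepts { filename } and removes the played file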
console.log("start", Human);
const humanConfig = { // user configuration for human, used to fine-tune behavior
  modelBasePath: 'https://cdn.jsdelivr.net/npm/@vladmandic/human/models/', // models can be loaded directly from the cdn as well
  filter: { enabled: true, equalization: true, flip: false },
  face: { enabled: true, detector: { rotation: false }, mesh: { enabled: false }, attention: { enabled: true }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
  body: { enabled: false },
  hand: { enabled: false },
  gesture: { enabled: false },
  object: { enabled: false },
  segmentation: { enabled: false },
};
const human = new Human.Human(humanConfig);
// console.log("continue", human);
const canvas = document.getElementById('canvas');
// const dataForm = document.getElementById('data-form');
// const dataInput = document.getElementById('data-input');
// const canvas = $('#canvas').get(0)
var interpolated; // last smoothed detection result from human.next()
var need_generation = true; // true when the backend may start generating a new response
var need_playing = true; // true when no audio response is currently playing
var text; // latest text returned by the server, wrapped for the on-screen log
var prompt; // latest prompt returned by the server, wrapped for the on-screen log
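// Wraps a string so that at most `wordsPerLine` words appear on each line;
// used to keep the server text and prompt readable in the fixed log element.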
function splitTextIntoLines(text, wordsPerLine) {
  const words = text.split(' ');
  let line = '';
  let j = 0;
  for (let i = 0; i < words.length; i++) {
    if (j >= wordsPerLine) { // start a new line without dropping the current word
      line += '\n';
      j = 0;
    }
    line += words[i] + ' ';
    j += 1;
  }
  return line;
}
async function drawLoop() { // main screen refresh loop
  interpolated = human.next(); // get smoothed result using last-known results which are continuously updated based on input webcam video
  human.draw.canvas(human.webcam.element, canvas); // draw webcam video to screen canvas // better than using the processed image as this loop runs faster than the processing loop
  await human.draw.all(canvas, interpolated); // draw labels, boxes, lines, etc.
  document.getElementById('log').innerHTML =
    `human version: ${human.version} | ` +
    `tfjs version: ${human.tf.version['tfjs-core']}<br>` +
    `platform: ${human.env.platform} | ` +
    `agent ${human.env.agent}<br>` +
    `need_generation ${need_generation}<br>` +
    `prompt ${prompt}<br>` +
    `text: ${text}`;
}
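// Plays a generated audio file; when playback finishes, re-enables generation and
// polling, then asks the server to remove the file so it is not played again.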
async function playAudio(audioSrc) {
  console.log('playing audio');
  const audioPlayer = new Audio(audioSrc);
  audioPlayer.addEventListener('ended', async function () {
    need_generation = true;
    need_playing = true;
    text = '';
    console.log('playing done');
    // Ask the server to delete the file that just finished playing
    $.ajax({
      url: '/delete_audio',
      type: 'POST',
      contentType: 'application/json',
      data: JSON.stringify({ filename: audioSrc }),
      success: function (data) {
        console.log('Delete response:', data.message);
      },
      error: function (jqXHR, textStatus, errorThrown) {
        console.error('Error deleting audio file:', textStatus, errorThrown);
      }
    });
  });
  audioPlayer.play();
}
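// Polls the backend for a newly generated audio file. The response is assumed to
// carry { newAudio, filename, text, prompt } (inferred from how it is used below).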
async function checkForNewAudio() {
  $.ajax({
    url: '/check_audio',
    method: 'GET',
    success: function (response) {
      // need_generation = response.need_generation;
      if (response.newAudio && need_playing) {
        console.log(response.newAudio);
        // If there is a new audio file, play it on the page
        text = splitTextIntoLines(response.text, 20);
        prompt = splitTextIntoLines(response.prompt, 20);
        need_generation = false;
        need_playing = false;
        playAudio(response.filename);
      }
      console.log("check audio done");
      console.log("text is " + text);
      console.log("filename is " + response.filename);
    },
    error: function (jqXHR, textStatus, errorThrown) {
      console.error('Error checking for a new audio file:', textStatus, errorThrown);
    }
  });
}
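// Sends the latest interpolated detection result to the backend together with the
// need_generation flag, presumably so the server knows whether it may start a new generation.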
async function send_data() {
  $.ajax({
    url: '/send_data',
    type: 'POST',
    data: { data: JSON.stringify(interpolated), state: need_generation },
    success: function (response) {
      console.log('face data sent!');
    },
    error: function (jqXHR, textStatus, errorThrown) {
      console.error('face data was NOT sent:', textStatus, errorThrown);
    }
  });
}
let drawLoopIntervalId;
let sendDataIntervalId;
let checkForNewAudioIntervalId;
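// Entry point: starts the webcam, sizes the canvas to the webcam resolution, and
// schedules the three loops (redraw every 30 ms, send face data every 1 s, poll
// for new audio every 5 s). Clicking the canvas pauses and resumes everything.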
async function main() { // main entry point
  document.getElementById('log').innerHTML =
    `human version: ${human.version} | ` +
    `tfjs version: ${human.tf.version['tfjs-core']} <br>` +
    `platform: ${human.env.platform} | ` +
    `agent ${human.env.agent}<br>` +
    `need_generation ${need_generation}<br>` +
    `text: ${text}`;
  await human.webcam.start({ crop: true }); // find webcam and start it
  human.video(human.webcam.element); // instruct human to continuously detect video frames
  canvas.width = human.webcam.width; // set canvas resolution to input webcam native resolution
  canvas.height = human.webcam.height;
  canvas.onclick = async () => { // pause when clicked on screen and resume on next click
    if (human.webcam.paused) {
      await human.webcam.play();
      drawLoopIntervalId = setInterval(drawLoop, 30);
      sendDataIntervalId = setInterval(send_data, 1000);
      checkForNewAudioIntervalId = setInterval(checkForNewAudio, 5000);
    } else {
      human.webcam.pause();
      clearInterval(drawLoopIntervalId);
      clearInterval(sendDataIntervalId);
      clearInterval(checkForNewAudioIntervalId);
    }
  };
  drawLoopIntervalId = setInterval(drawLoop, 30);
  sendDataIntervalId = setInterval(send_data, 1000);
  checkForNewAudioIntervalId = setInterval(checkForNewAudio, 5000);
}
window.onload = main;
</script>
</body>
</html>