/**
 * Copies up to {@code count} bytes into the content cache, honoring the optional
 * cache limit. Once the limit is crossed the overflow flag is latched, only the
 * bytes that still fit are cached, and {@code handleContentOverflow} is notified.
 */
private void writeToCache(final byte[] b, final int off, int count) {
    // Nothing to do once we have overflowed, or when there is nothing to write.
    if (this.overflow || count <= 0) {
        return;
    }
    // Would this write push the cache past its configured limit?
    if (contentCacheLimit != null && cachedContent.size() + count > contentCacheLimit) {
        this.overflow = true;
        // Cache only the remaining capacity, then report the overflow.
        cachedContent.write(b, off, contentCacheLimit - cachedContent.size());
        handleContentOverflow(contentCacheLimit);
        return;
    }
    cachedContent.write(b, off, count);
}
/**
 * Accumulates one byte of the DATA portion of a RELP frame. When the expected
 * data length has been collected, the frame builder receives the payload and
 * the parser advances to the TRAILER state.
 */
private void processDATA(final byte b) {
    currBytes.write(b);
    logger.trace("Data size is {}", new Object[] {currBytes.size()});

    // Keep accumulating until the declared payload length has been reached.
    if (currBytes.size() < frameBuilder.dataLength) {
        return;
    }

    frameBuilder.data(currBytes.toByteArray());
    logger.debug("Reached expected data size of {}", new Object[] {frameBuilder.dataLength});
    currBytes.reset();
    currState = RELPState.TRAILER;
}
/**
 * Reads a single byte through the throttler's copy routine.
 *
 * @return the next byte as an unsigned value (0-255), or -1 if nothing was copied
 * @throws IOException if the underlying copy fails
 */
@Override
public int read() throws IOException {
    final ByteArrayOutputStream single = new ByteArrayOutputStream(1);
    LeakyBucketStreamThrottler.this.copy(toWrap, single, 1L);
    // An empty buffer means the wrapped stream produced no byte.
    if (single.size() == 0) {
        return -1;
    }
    // Mask to return the byte as an unsigned int, per the InputStream contract.
    return single.toByteArray()[0] & 0xFF;
}
/**
 * Marshals the model object located via {@code locateToBeMarshalled} into the
 * HTTP response, buffering in memory first so the Content-Length header can be
 * set before any bytes are written.
 *
 * @throws IllegalStateException if no marshallable object is found in the model
 */
@Override
protected void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    final Object toBeMarshalled = locateToBeMarshalled(model);
    if (toBeMarshalled == null) {
        throw new IllegalStateException("Unable to locate object to be marshalled in model: " + model);
    }

    Assert.state(this.marshaller != null, "No Marshaller set");
    // Buffer the marshalled output so the content length is known up front.
    final ByteArrayOutputStream content = new ByteArrayOutputStream(1024);
    this.marshaller.marshal(toBeMarshalled, new StreamResult(content));

    setResponseContentType(request, response);
    response.setContentLength(content.size());
    content.writeTo(response.getOutputStream());
}
/**
 * Must be called before reading the StringBuilder: drains any bytes buffered in
 * {@code baos} into {@code sb}, decoding with {@code charset} when one is set
 * (otherwise the platform default charset is used), then clears the buffer.
 */
@Override
public void flush() throws IOException {
    if (baos == null) {
        return;
    }
    baos.flush();
    if (baos.size() == 0) {
        return;
    }
    final byte[] pending = baos.toByteArray();
    // No charset configured -> platform-default decoding, matching prior behavior.
    sb.append(charset == null ? new String(pending) : new String(pending, charset));
    baos.reset();
}
// NOTE(review): this fragment appears to duplicate the body of
// determineSize(Serializable) seen elsewhere in this file. 'ser' is read
// before ever being assigned, which does not compile in Java — this looks
// like a truncated or mis-extracted snippet. TODO confirm against the
// original file before relying on it.
Serializable ser;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(ser);
oos.close();
// Returns the serialized size in bytes of the object written above.
return baos.size();
/**
 * Reads a page of up to {@code readLength} bytes from a (possibly gzipped) file,
 * starting at byte offset {@code start}, and returns it as a String decoded with
 * the platform default charset.
 *
 * @throws InvalidRequestException if {@code start} is at or past {@code fileLength}
 * @throws IOException on open/read failures (open failures marked separately)
 */
private String pageFile(String path, boolean isZipFile, long fileLength, Integer start, Integer readLength)
        throws IOException, InvalidRequestException {
    try (InputStream input = isZipFile ? new GZIPInputStream(new FileInputStream(path)) : new FileInputStream(path);
         ByteArrayOutputStream output = new ByteArrayOutputStream()) {
        if (start >= fileLength) {
            throw new InvalidRequestException("Cannot start past the end of the file");
        }
        if (start > 0) {
            StreamUtil.skipBytes(input, start);
        }

        // Read in 1 KiB chunks, never asking for more than is still wanted.
        final byte[] chunk = new byte[1024];
        for (int remaining = readLength; remaining > 0; remaining = readLength - output.size()) {
            final int read = input.read(chunk, 0, Math.min(1024, remaining));
            if (read <= 0) {
                break; // end of stream before the requested length was filled
            }
            output.write(chunk, 0, read);
        }

        numPageRead.mark();
        return output.toString();
    } catch (FileNotFoundException e) {
        // FileNotFoundException must precede IOException (it is a subclass).
        numFileOpenExceptions.mark();
        throw e;
    } catch (IOException e) {
        numFileReadExceptions.mark();
        throw e;
    }
}
/**
 * Writes the file header: the event schema and the header schema, each as a
 * length-prefixed block, followed by a header record containing the first event
 * id, a timestamp offset, and the id/type lookup tables.
 * The exact write order here defines the on-disk format — do not reorder.
 */
@Override protected synchronized void writeHeader(final long firstEventId, final DataOutputStream out) throws IOException {
    // Event schema, prefixed with its serialized length so readers can skip it.
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    eventSchema.writeTo(baos);
    out.writeInt(baos.size());
    baos.writeTo(out);
    // Reuse the same buffer for the header schema, also length-prefixed.
    baos.reset();
    headerSchema.writeTo(baos);
    out.writeInt(baos.size());
    baos.writeTo(out);
    // Capture bookkeeping state; systemTimeOffset is the wall-clock time at
    // header-write time (presumably used to delta-encode event timestamps —
    // TODO confirm against the event-writing code).
    this.firstEventId = firstEventId;
    this.systemTimeOffset = System.currentTimeMillis();
    // Assemble the header record and serialize it with the shared record writer.
    final Map<String, Object> headerValues = new HashMap<>();
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.FIRST_EVENT_ID, firstEventId);
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.TIMESTAMP_OFFSET, systemTimeOffset);
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.COMPONENT_IDS, idLookup.getComponentIdentifiers());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.COMPONENT_TYPES, idLookup.getComponentTypes());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.QUEUE_IDS, idLookup.getQueueIdentifiers());
    headerValues.put(EventIdFirstHeaderSchema.FieldNames.EVENT_TYPES, eventTypeNames);
    final FieldMapRecord headerInfo = new FieldMapRecord(headerSchema, headerValues);
    schemaRecordWriter.writeRecord(headerInfo, out);
}
/**
 * Resolves a class from the in-memory byte-code map. Do not call directly;
 * use {@link ClassLoader#loadClass(String)} instead.
 *
 * @throws ClassNotFoundException if no byte code is registered for {@code name}
 */
@Override
public Class<?> findClass(String name) throws ClassNotFoundException {
    final ByteArrayOutputStream byteCode = byteCodeForClasses.get(name);
    if (byteCode == null) {
        throw new ClassNotFoundException(name);
    }
    // toByteArray() copies exactly size() bytes, so the full array is the class file.
    final byte[] raw = byteCode.toByteArray();
    return defineClass(name, raw, 0, raw.length);
}
/**
 * Reads one newline-terminated message from the stream. If end-of-stream is hit
 * with data already buffered, that partial data is returned as the message;
 * if the buffer is empty, the EOFException propagates to the caller.
 */
@Override
public String read() throws IOException {
    try {
        int b;
        while ((b = inputStream.read()) != '\n') {
            if (b == -1) {
                throw new EOFException("The stream has been closed or the end of stream has been reached");
            }
            buffer.write(b);
        }
        // Preserve the terminating newline in the returned message.
        buffer.write('\n');
    } catch (final EOFException e) {
        // Nothing buffered: surface the EOF. Otherwise fall through and
        // return whatever was accumulated before the stream ended.
        if (buffer.size() == 0) {
            throw e;
        }
    }
    final String message = buffer.toString();
    buffer.reset();
    return message;
}
}
/** * Write the given temporary OutputStream to the HTTP response. * @param response current HTTP response * @param baos the temporary OutputStream to write * @throws IOException if writing/flushing failed */ protected void writeToResponse(HttpServletResponse response, ByteArrayOutputStream baos) throws IOException { // Write content type and also length (determined via byte array). response.setContentType(getContentType()); response.setContentLength(baos.size()); // Flush byte array to servlet output stream. ServletOutputStream out = response.getOutputStream(); baos.writeTo(out); out.flush(); }
private static List<byte[]> parseParentsBytes(byte[] bytes) { List<byte[]> parents = new ArrayList<>(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); for (int i = 0; i < bytes.length; i++) { if (bytes[i] == ESCAPE_BYTE) { i++; if (bytes[i] == SEPARATED_BYTE) { parents.add(bos.toByteArray()); bos.reset(); continue; } // fall through to append the byte } bos.write(bytes[i]); } if (bos.size() > 0) { parents.add(bos.toByteArray()); } return parents; }
@Override public void write(DataOutput out) throws IOException { ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(bos); // serialize path, offset, length using FileSplit super.write(dos); int required = bos.size(); // write addition payload required for orc writeAdditionalPayload(dos); int additional = bos.size() - required; out.write(bos.toByteArray()); if (LOG.isTraceEnabled()) { LOG.trace("Writing additional {} bytes to OrcSplit as payload. Required {} bytes.", additional, required); } }
/**
 * Writes {@code value} to {@code dos} as a length-prefixed record: the value is
 * serialized into a buffer first so its byte count can be written ahead of the
 * payload.
 */
private <T> void serialize(final T value, final Serializer<T> serializer, final DataOutputStream dos) throws IOException {
    final ByteArrayOutputStream buffered = new ByteArrayOutputStream();
    serializer.serialize(value, buffered);
    // Length prefix, then the serialized bytes themselves.
    dos.writeInt(buffered.size());
    buffered.writeTo(dos);
}
/**
 * Sends the buffered bytes over the web socket (gzipping first when large
 * enough) and clears the buffer. A no-op when the buffer is empty.
 */
private void flushBuffer(ByteArrayOutputStream buffer, SocketEndpoint webSocket) throws IOException {
    if (buffer.size() > 0) {
        final byte[] payload = maybeGzipIfLargeEnough(buffer.toByteArray());
        webSocket.send(ByteBuffer.wrap(payload));
        buffer.reset();
    }
}
/**
 * Computes the size in bytes of {@code ser} when Java-serialized.
 *
 * Fix: the original closed the ObjectOutputStream outside any finally block,
 * so a failure in {@code writeObject} leaked the stream; try-with-resources
 * guarantees it is closed on every path. (Closing a stream backed by a
 * ByteArrayOutputStream releases no OS resources, but the exception-safe
 * idiom costs nothing and survives future refactors.)
 *
 * @param ser the object to measure
 * @return the serialized length in bytes
 * @throws IOException if serialization fails
 */
private int determineSize(Serializable ser) throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(ser);
    }
    // close() above flushed everything, so baos now holds the full encoding.
    return baos.size();
}
// NOTE(review): truncated fragment — the braces are unbalanced and the
// enclosing method is not visible. As written, os.size() > 0 is always false
// immediately after construction, so handleFrame would never run here.
// TODO confirm the intended statement order against the original file
// (likely: write/accumulate first, then check size and hand off the frame).
ByteArrayOutputStream os = new ByteArrayOutputStream();
if (os.size() > 0) {
handleFrame(os);
os.write(b);