resolved conflicts for merge of 2b652855 to master

Change-Id: I3604c95530823605d66cb56be7a37c55399a9271
This commit is contained in:
Chia-chi Yeh
2010-09-23 14:59:58 +08:00
9 changed files with 1110 additions and 296 deletions

View File

@@ -16,41 +16,133 @@
package android.net.rtp;
/** @hide */
import java.util.Arrays;
/**
* This class defines a collection of audio codecs to be used with
* {@link AudioStream}s. Their parameters are designed to be exchanged using
* Session Description Protocol (SDP). Most of the values listed here can be
* found in RFC 3551, while others are described in separate standards.
*
* <p>A few simple configurations are defined as public static instances for the
* convenience of direct uses. More complicated ones could be obtained using
* {@link #getCodec(int, String, String)}. For example, one can use the
* following snippet to create a mode-1-only AMR codec.</p>
* <pre>
* AudioCodec codec = AudioCodec.getCodec(100, "AMR/8000", "mode-set=1");
* </pre>
*
* @see AudioStream
* @hide
*/
public class AudioCodec {
public static final AudioCodec ULAW = new AudioCodec("PCMU", 8000, 160, 0);
public static final AudioCodec ALAW = new AudioCodec("PCMA", 8000, 160, 8);
/**
* The RTP payload type of the encoding.
*/
public final int type;
/**
* Returns system supported codecs.
* The encoding parameters to be used in the corresponding SDP attribute.
*/
public static AudioCodec[] getSystemSupportedCodecs() {
return new AudioCodec[] {AudioCodec.ULAW, AudioCodec.ALAW};
public final String rtpmap;
/**
* The format parameters to be used in the corresponding SDP attribute.
*/
public final String fmtp;
/**
* G.711 u-law audio codec.
*/
public static final AudioCodec PCMU = new AudioCodec(0, "PCMU/8000", null);
/**
* G.711 a-law audio codec.
*/
public static final AudioCodec PCMA = new AudioCodec(8, "PCMA/8000", null);
/**
* GSM Full-Rate audio codec, also known as GSM-FR, GSM 06.10, GSM, or
* simply FR.
*/
public static final AudioCodec GSM = new AudioCodec(3, "GSM/8000", null);
/**
* GSM Enhanced Full-Rate audio codec, also known as GSM-EFR, GSM 06.60, or
* simply EFR.
*/
public static final AudioCodec GSM_EFR = new AudioCodec(96, "GSM-EFR/8000", null);
/**
* Adaptive Multi-Rate narrowband audio codec, also known as AMR or AMR-NB.
* Currently CRC, robust sorting, and interleaving are not supported. See
* more details about these features in RFC 4867.
*/
public static final AudioCodec AMR = new AudioCodec(97, "AMR/8000", null);
// TODO: add rest of the codecs when the native part is done.
private static final AudioCodec[] sCodecs = {PCMU, PCMA};
private AudioCodec(int type, String rtpmap, String fmtp) {
this.type = type;
this.rtpmap = rtpmap;
this.fmtp = fmtp;
}
/**
* Returns the codec instance if it is supported by the system.
* Returns system supported audio codecs.
*/
public static AudioCodec[] getCodecs() {
return Arrays.copyOf(sCodecs, sCodecs.length);
}
/**
* Creates an AudioCodec according to the given configuration.
*
* @param name name of the codec
* @return the matched codec or null if the codec name is not supported by
* the system
* @param type The payload type of the encoding defined in RTP/AVP.
* @param rtpmap The encoding parameters specified in the corresponding SDP
* attribute, or null if it is not available.
* @param fmtp The format parameters specified in the corresponding SDP
* attribute, or null if it is not available.
* @return The configured AudioCodec or {@code null} if it is not supported.
*/
public static AudioCodec getSystemSupportedCodec(String name) {
for (AudioCodec codec : getSystemSupportedCodecs()) {
if (codec.name.equals(name)) return codec;
public static AudioCodec getCodec(int type, String rtpmap, String fmtp) {
if (type < 0 || type > 127) {
return null;
}
return null;
}
public final String name;
public final int sampleRate;
public final int sampleCount;
public final int defaultType;
AudioCodec hint = null;
if (rtpmap != null) {
String clue = rtpmap.trim().toUpperCase();
for (AudioCodec codec : sCodecs) {
if (clue.startsWith(codec.rtpmap)) {
String channels = clue.substring(codec.rtpmap.length());
if (channels.length() == 0 || channels.equals("/1")) {
hint = codec;
}
break;
}
}
} else if (type < 96) {
for (AudioCodec codec : sCodecs) {
if (type == codec.type) {
hint = codec;
rtpmap = codec.rtpmap;
break;
}
}
}
private AudioCodec(String name, int sampleRate, int sampleCount, int defaultType) {
this.name = name;
this.sampleRate = sampleRate;
this.sampleCount = sampleCount;
this.defaultType = defaultType;
if (hint == null) {
return null;
}
if (hint == AMR && fmtp != null) {
String clue = fmtp.toLowerCase();
if (clue.contains("crc=1") || clue.contains("robust-sorting=1") ||
clue.contains("interleaving=")) {
return null;
}
}
return new AudioCodec(type, rtpmap, fmtp);
}
}

View File

@@ -20,13 +20,63 @@ import java.util.HashMap;
import java.util.Map;
/**
* An AudioGroup acts as a router connected to the speaker, the microphone, and
* {@link AudioStream}s. Its pipeline has four steps. First, for each
* AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, decodes its incoming
* packets and stores in its buffer. Then, if the microphone is enabled,
* processes the recorded audio and stores in its buffer. Third, if the speaker
* is enabled, mixes and playbacks buffers of all AudioStreams. Finally, for
* each AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, mixes all other
* buffers and sends back the encoded packets. An AudioGroup does nothing if
* there is no AudioStream in it.
*
* <p>A few things must be noticed before using these classes. The performance is
* highly related to the system load and the network bandwidth. Usually a
* simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
* bandwidth, and vice versa. Using two AudioStreams at the same time not only
* doubles the load but also the bandwidth. The condition varies from one device
* to another, and developers must choose the right combination in order to get
* the best result.
*
* <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
* example, a Voice over IP (VoIP) application might want to put a conference
* call on hold in order to make a new call but still allow people in the
* previous call to talk to each other. This can be done easily using two
* AudioGroups, but there are some limitations. Since the speaker and the
* microphone are shared globally, only one AudioGroup is allowed to run in
* modes other than {@link #MODE_ON_HOLD}. In addition, before adding an
* AudioStream into an AudioGroup, one should always put all other AudioGroups
* into {@link #MODE_ON_HOLD}. That will make sure the audio driver is correctly
* initialized.
* @hide
*/
/** @hide */
public class AudioGroup {
/**
* This mode is similar to {@link #MODE_NORMAL} except the speaker and
* the microphone are disabled.
*/
public static final int MODE_ON_HOLD = 0;
/**
* This mode is similar to {@link #MODE_NORMAL} except the microphone is
* muted.
*/
public static final int MODE_MUTED = 1;
/**
* This mode indicates that the speaker, the microphone, and all
* {@link AudioStream}s in the group are enabled. First, the packets
* received from the streams are decoded and mixed with the audio recorded
* from the microphone. Then, the results are played back to the speaker,
* encoded and sent back to each stream.
*/
public static final int MODE_NORMAL = 2;
public static final int MODE_EC_ENABLED = 3;
/**
* This mode is similar to {@link #MODE_NORMAL} except the echo suppression
* is enabled. It should be only used when the speaker phone is on.
*/
public static final int MODE_ECHO_SUPPRESSION = 3;
private final Map<AudioStream, Integer> mStreams;
private int mMode = MODE_ON_HOLD;
@@ -36,23 +86,42 @@ public class AudioGroup {
System.loadLibrary("rtp_jni");
}
/**
* Creates an empty AudioGroup.
*/
public AudioGroup() {
mStreams = new HashMap<AudioStream, Integer>();
}
/**
* Returns the current mode.
*/
public int getMode() {
return mMode;
}
/**
* Changes the current mode. It must be one of {@link #MODE_ON_HOLD},
* {@link #MODE_MUTED}, {@link #MODE_NORMAL}, and
* {@link #MODE_ECHO_SUPPRESSION}.
*
* @param mode The mode to change to.
* @throws IllegalArgumentException if the mode is invalid.
*/
public synchronized native void setMode(int mode);
synchronized void add(AudioStream stream, AudioCodec codec, int codecType, int dtmfType) {
private native void add(int mode, int socket, String remoteAddress,
int remotePort, String codecSpec, int dtmfType);
synchronized void add(AudioStream stream, AudioCodec codec, int dtmfType) {
if (!mStreams.containsKey(stream)) {
try {
int socket = stream.dup();
String codecSpec = String.format("%d %s %s", codec.type,
codec.rtpmap, codec.fmtp);
add(stream.getMode(), socket,
stream.getRemoteAddress().getHostAddress(), stream.getRemotePort(),
codec.name, codec.sampleRate, codec.sampleCount, codecType, dtmfType);
stream.getRemoteAddress().getHostAddress(),
stream.getRemotePort(), codecSpec, dtmfType);
mStreams.put(stream, socket);
} catch (NullPointerException e) {
throw new IllegalStateException(e);
@@ -60,8 +129,7 @@ public class AudioGroup {
}
}
private native void add(int mode, int socket, String remoteAddress, int remotePort,
String codecName, int sampleRate, int sampleCount, int codecType, int dtmfType);
private native void remove(int socket);
synchronized void remove(AudioStream stream) {
Integer socket = mStreams.remove(stream);
@@ -70,8 +138,6 @@ public class AudioGroup {
}
}
private native void remove(int socket);
/**
* Sends a DTMF digit to every {@link AudioStream} in this group. Currently
* only event {@code 0} to {@code 15} are supported.
@@ -80,13 +146,16 @@ public class AudioGroup {
*/
public native synchronized void sendDtmf(int event);
public synchronized void reset() {
/**
* Removes every {@link AudioStream} in this group.
*/
public synchronized void clear() {
remove(-1);
}
@Override
protected void finalize() throws Throwable {
reset();
clear();
super.finalize();
}
}

View File

@@ -20,12 +20,27 @@ import java.net.InetAddress;
import java.net.SocketException;
/**
* AudioStream represents a RTP stream carrying audio payloads.
* An AudioStream is a {@link RtpStream} which carries audio payloads over
* Real-time Transport Protocol (RTP). Two different classes are developed in
* order to support various usages such as audio conferencing. An AudioStream
* represents a remote endpoint which consists of a network mapping and a
* configured {@link AudioCodec}. On the other side, an {@link AudioGroup}
* represents a local endpoint which mixes all the AudioStreams and optionally
* interacts with the speaker and the microphone at the same time. The simplest
* usage includes one for each endpoint. For other combinations, users should
* be aware of the limitations described in {@link AudioGroup}.
*
* <p>An AudioStream becomes busy when it joins an AudioGroup. In this case most
* of the setter methods are disabled. This is designed to ease the task of
* managing native resources. One can always make an AudioStream leave its
* AudioGroup by calling {@link #join(AudioGroup)} with {@code null} and put it
* back after the modification is done.
*
* @see AudioGroup
* @hide
*/
/** @hide */
public class AudioStream extends RtpStream {
private AudioCodec mCodec;
private int mCodecType = -1;
private int mDtmfType = -1;
private AudioGroup mGroup;
@@ -42,7 +57,8 @@ public class AudioStream extends RtpStream {
}
/**
* Returns {@code true} if the stream already joined an {@link AudioGroup}.
* Returns {@code true} if the stream has already joined an
* {@link AudioGroup}.
*/
@Override
public final boolean isBusy() {
@@ -52,7 +68,7 @@ public class AudioStream extends RtpStream {
/**
* Returns the joined {@link AudioGroup}.
*/
public AudioGroup getAudioGroup() {
public AudioGroup getGroup() {
return mGroup;
}
@@ -74,35 +90,45 @@ public class AudioStream extends RtpStream {
mGroup = null;
}
if (group != null) {
group.add(this, mCodec, mCodecType, mDtmfType);
group.add(this, mCodec, mDtmfType);
mGroup = group;
}
}
/**
* Sets the {@link AudioCodec} and its RTP payload type. According to RFC
* 3551, the type must be in the range of 0 and 127, where 96 and above are
* dynamic types. For codecs with static mappings (non-negative
* {@link AudioCodec#defaultType}), assigning a different non-dynamic type
* is disallowed.
* Returns the {@link AudioCodec}, or {@code null} if it is not set.
*
* @see #setCodec(AudioCodec)
*/
public AudioCodec getCodec() {
return mCodec;
}
/**
* Sets the {@link AudioCodec}.
*
* @param codec The AudioCodec to be used.
* @param type The RTP payload type.
* @throws IllegalArgumentException if the type is invalid or used by DTMF.
* @throws IllegalArgumentException if its type is used by DTMF.
* @throws IllegalStateException if the stream is busy.
*/
public void setCodec(AudioCodec codec, int type) {
public void setCodec(AudioCodec codec) {
if (isBusy()) {
throw new IllegalStateException("Busy");
}
if (type < 0 || type > 127 || (type != codec.defaultType && type < 96)) {
throw new IllegalArgumentException("Invalid type");
}
if (type == mDtmfType) {
if (codec.type == mDtmfType) {
throw new IllegalArgumentException("The type is used by DTMF");
}
mCodec = codec;
mCodecType = type;
}
/**
* Returns the RTP payload type for dual-tone multi-frequency (DTMF) digits,
* or {@code -1} if it is not enabled.
*
* @see #setDtmfType(int)
*/
public int getDtmfType() {
return mDtmfType;
}
/**
@@ -111,7 +137,7 @@ public class AudioStream extends RtpStream {
* certain tasks, such as second-stage dialing. According to RFC 2833, the
* RTP payload type for DTMF is assigned dynamically, so it must be in the
* range of 96 and 127. One can use {@code -1} to disable DTMF and free up
* the previous assigned value. This method cannot be called when the stream
* the previous assigned type. This method cannot be called when the stream
* already joined an {@link AudioGroup}.
*
* @param type The RTP payload type to be used or {@code -1} to disable it.
@@ -127,7 +153,7 @@ public class AudioStream extends RtpStream {
if (type < 96 || type > 127) {
throw new IllegalArgumentException("Invalid type");
}
if (type == mCodecType) {
if (type == mCodec.type) {
throw new IllegalArgumentException("The type is used by codec");
}
}

View File

@@ -22,13 +22,25 @@ import java.net.Inet6Address;
import java.net.SocketException;
/**
* RtpStream represents a base class of media streams running over
* Real-time Transport Protocol (RTP).
* RtpStream represents the base class of streams which send and receive network
* packets with media payloads over Real-time Transport Protocol (RTP).
* @hide
*/
/** @hide */
public class RtpStream {
/**
* This mode indicates that the stream sends and receives packets at the
* same time. This is the initial mode for new streams.
*/
public static final int MODE_NORMAL = 0;
/**
* This mode indicates that the stream only sends packets.
*/
public static final int MODE_SEND_ONLY = 1;
/**
* This mode indicates that the stream only receives packets.
*/
public static final int MODE_RECEIVE_ONLY = 2;
private final InetAddress mLocalAddress;
@@ -89,15 +101,16 @@ public class RtpStream {
}
/**
* Returns {@code true} if the stream is busy. This method is intended to be
* overridden by subclasses.
* Returns {@code true} if the stream is busy. In this case most of the
* setter methods are disabled. This method is intended to be overridden
* by subclasses.
*/
public boolean isBusy() {
return false;
}
/**
* Returns the current mode. The initial mode is {@link #MODE_NORMAL}.
* Returns the current mode.
*/
public int getMode() {
return mMode;
@@ -123,7 +136,8 @@ public class RtpStream {
}
/**
* Associates with a remote host.
* Associates with a remote host. This defines the destination of the
* outgoing packets.
*
* @param address The network address of the remote host.
* @param port The network port of the remote host.

View File

@@ -0,0 +1,612 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.net.sip;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * An object used to manipulate messages of Session Description Protocol (SDP).
 * It is mainly designed for the uses of Session Initiation Protocol (SIP).
 * Therefore, it only handles connection addresses ("c="), bandwidth limits
 * ("b="), encryption keys ("k="), and attribute fields ("a="). Currently this
 * implementation does not support multicast sessions.
 *
 * <p>Here is an example code to create a session description.</p>
 * <pre>
 * SimpleSessionDescription description = new SimpleSessionDescription(
 *         System.currentTimeMillis(), "1.2.3.4");
 * Media media = description.newMedia("audio", 56789, 1, "RTP/AVP");
 * media.setRtpPayload(0, "PCMU/8000", null);
 * media.setRtpPayload(8, "PCMA/8000", null);
 * media.setRtpPayload(127, "telephone-event/8000", "0-15");
 * media.setAttribute("sendrecv", "");
 * </pre>
 * <p>Invoking <code>description.encode()</code> will produce a result like the
 * one below.</p>
 * <pre>
 * v=0
 * o=- 1284970442706 1284970442709 IN IP4 1.2.3.4
 * s=-
 * c=IN IP4 1.2.3.4
 * t=0 0
 * m=audio 56789 RTP/AVP 0 8 127
 * a=rtpmap:0 PCMU/8000
 * a=rtpmap:8 PCMA/8000
 * a=rtpmap:127 telephone-event/8000
 * a=fmtp:127 0-15
 * a=sendrecv
 * </pre>
 * @hide
 */
public class SimpleSessionDescription {
    // Session-level fields; the string defines the order used by encode().
    private final Fields mFields = new Fields("voscbtka");
    private final ArrayList<Media> mMedia = new ArrayList<Media>();

    /**
     * Creates a minimal session description from the given session ID and
     * unicast address. The address is used in the origin field ("o=") and the
     * connection field ("c="). See {@link SimpleSessionDescription} for an
     * example of its usage.
     */
    public SimpleSessionDescription(long sessionId, String address) {
        // Prefix the address with the network/address type required by SDP.
        address = (address.indexOf(':') < 0 ? "IN IP4 " : "IN IP6 ") + address;
        mFields.parse("v=0");
        mFields.parse(String.format("o=- %d %d %s", sessionId,
                System.currentTimeMillis(), address));
        mFields.parse("s=-");
        mFields.parse("t=0 0");
        mFields.parse("c=" + address);
    }

    /**
     * Creates a session description from the given message.
     *
     * @throws IllegalArgumentException if message is invalid.
     */
    public SimpleSessionDescription(String message) {
        String[] lines = message.trim().replaceAll(" +", " ").split("[\r\n]+");
        Fields fields = mFields;

        for (String line : lines) {
            try {
                if (line.charAt(1) != '=') {
                    throw new IllegalArgumentException();
                }
                if (line.charAt(0) == 'm') {
                    // Media line: "m=<type> <port>[/<count>] <protocol> <formats>".
                    String[] parts = line.substring(2).split(" ", 4);
                    String[] ports = parts[1].split("/", 2);
                    Media media = newMedia(parts[0], Integer.parseInt(ports[0]),
                            (ports.length < 2) ? 1 : Integer.parseInt(ports[1]),
                            parts[2]);
                    for (String format : parts[3].split(" ")) {
                        media.setFormat(format, null);
                    }
                    // Every following line belongs to this media description
                    // until the next "m=" line.
                    fields = media;
                } else {
                    fields.parse(line);
                }
            } catch (Exception e) {
                throw new IllegalArgumentException("Invalid SDP: " + line);
            }
        }
    }

    /**
     * Creates a new media description in this session description.
     *
     * @param type The media type, e.g. {@code "audio"}.
     * @param port The first transport port used by this media.
     * @param portCount The number of contiguous ports used by this media.
     * @param protocol The transport protocol, e.g. {@code "RTP/AVP"}.
     */
    public Media newMedia(String type, int port, int portCount,
            String protocol) {
        Media media = new Media(type, port, portCount, protocol);
        mMedia.add(media);
        return media;
    }

    /**
     * Returns all the media descriptions in this session description.
     */
    public Media[] getMedia() {
        return mMedia.toArray(new Media[mMedia.size()]);
    }

    /**
     * Encodes the session description and all its media descriptions in a
     * string. Note that the result might be incomplete if a required field
     * has never been added before.
     */
    public String encode() {
        StringBuilder buffer = new StringBuilder();
        mFields.write(buffer);
        for (Media media : mMedia) {
            media.write(buffer);
        }
        return buffer.toString();
    }

    /**
     * Returns the connection address or {@code null} if it is not present.
     */
    public String getAddress() {
        return mFields.getAddress();
    }

    /**
     * Sets the connection address. The field will be removed if the address
     * is {@code null}.
     */
    public void setAddress(String address) {
        mFields.setAddress(address);
    }

    /**
     * Returns the encryption method or {@code null} if it is not present.
     */
    public String getEncryptionMethod() {
        return mFields.getEncryptionMethod();
    }

    /**
     * Returns the encryption key or {@code null} if it is not present.
     */
    public String getEncryptionKey() {
        return mFields.getEncryptionKey();
    }

    /**
     * Sets the encryption method and the encryption key. The field will be
     * removed if the method is {@code null}.
     */
    public void setEncryption(String method, String key) {
        mFields.setEncryption(method, key);
    }

    /**
     * Returns the types of the bandwidth limits.
     */
    public String[] getBandwidthTypes() {
        return mFields.getBandwidthTypes();
    }

    /**
     * Returns the bandwidth limit of the given type or {@code -1} if it is not
     * present.
     */
    public int getBandwidth(String type) {
        return mFields.getBandwidth(type);
    }

    /**
     * Sets the bandwidth limit for the given type. The field will be removed if
     * the value is negative.
     */
    public void setBandwidth(String type, int value) {
        mFields.setBandwidth(type, value);
    }

    /**
     * Returns the names of all the attributes.
     */
    public String[] getAttributeNames() {
        return mFields.getAttributeNames();
    }

    /**
     * Returns the attribute of the given name or {@code null} if it is not
     * present.
     */
    public String getAttribute(String name) {
        return mFields.getAttribute(name);
    }

    /**
     * Sets the attribute for the given name. The field will be removed if
     * the value is {@code null}. To set a binary attribute, use an empty
     * string as the value.
     */
    public void setAttribute(String name, String value) {
        mFields.setAttribute(name, value);
    }

    /**
     * This class represents a media description of a session description. It
     * can only be created by {@link SimpleSessionDescription#newMedia}. Since
     * the syntax is more restricted for RTP based protocols, two sets of access
     * methods are implemented. See {@link SimpleSessionDescription} for an
     * example of its usage.
     */
    public static class Media extends Fields {
        private final String mType;
        private final int mPort;
        private final int mPortCount;
        private final String mProtocol;
        private final ArrayList<String> mFormats = new ArrayList<String>();

        private Media(String type, int port, int portCount, String protocol) {
            // Media-level fields use a different order than session-level ones.
            super("icbka");
            mType = type;
            mPort = port;
            mPortCount = portCount;
            mProtocol = protocol;
        }

        /**
         * Returns the media type.
         */
        public String getType() {
            return mType;
        }

        /**
         * Returns the first transport port used by this media.
         */
        public int getPort() {
            return mPort;
        }

        /**
         * Returns the number of contiguous ports used by this media.
         */
        public int getPortCount() {
            return mPortCount;
        }

        /**
         * Returns the transport protocol.
         */
        public String getProtocol() {
            return mProtocol;
        }

        /**
         * Returns the media formats.
         */
        public String[] getFormats() {
            return mFormats.toArray(new String[mFormats.size()]);
        }

        /**
         * Returns the {@code fmtp} attribute of the given format or
         * {@code null} if it is not present.
         */
        public String getFmtp(String format) {
            return super.get("a=fmtp:" + format, ' ');
        }

        /**
         * Sets a format and its {@code fmtp} attribute. If the attribute is
         * {@code null}, the corresponding field will be removed.
         */
        public void setFormat(String format, String fmtp) {
            // Remove-then-add keeps the format unique while moving it to the
            // end of the list.
            mFormats.remove(format);
            mFormats.add(format);
            super.set("a=rtpmap:" + format, ' ', null);
            super.set("a=fmtp:" + format, ' ', fmtp);
        }

        /**
         * Removes a format and its {@code fmtp} attribute.
         */
        public void removeFormat(String format) {
            mFormats.remove(format);
            super.set("a=rtpmap:" + format, ' ', null);
            super.set("a=fmtp:" + format, ' ', null);
        }

        /**
         * Returns the RTP payload types.
         */
        public int[] getRtpPayloadTypes() {
            int[] types = new int[mFormats.size()];
            int length = 0;
            for (String format : mFormats) {
                try {
                    types[length] = Integer.parseInt(format);
                    ++length;
                } catch (NumberFormatException e) {
                    // Non-numeric formats are simply not RTP payload types.
                }
            }
            return Arrays.copyOf(types, length);
        }

        /**
         * Returns the {@code rtpmap} attribute of the given RTP payload type
         * or {@code null} if it is not present.
         */
        public String getRtpmap(int type) {
            return super.get("a=rtpmap:" + type, ' ');
        }

        /**
         * Returns the {@code fmtp} attribute of the given RTP payload type or
         * {@code null} if it is not present.
         */
        public String getFmtp(int type) {
            return super.get("a=fmtp:" + type, ' ');
        }

        /**
         * Sets an RTP payload type and its {@code rtpmap} and {@code fmtp}
         * attributes. If any of the attributes is {@code null}, the
         * corresponding field will be removed. See
         * {@link SimpleSessionDescription} for an example of its usage.
         */
        public void setRtpPayload(int type, String rtpmap, String fmtp) {
            String format = String.valueOf(type);
            mFormats.remove(format);
            mFormats.add(format);
            super.set("a=rtpmap:" + format, ' ', rtpmap);
            super.set("a=fmtp:" + format, ' ', fmtp);
        }

        /**
         * Removes an RTP payload and its {@code rtpmap} and {@code fmtp}
         * attributes.
         */
        public void removeRtpPayload(int type) {
            removeFormat(String.valueOf(type));
        }

        private void write(StringBuilder buffer) {
            buffer.append("m=").append(mType).append(' ').append(mPort);
            if (mPortCount != 1) {
                buffer.append('/').append(mPortCount);
            }
            buffer.append(' ').append(mProtocol);
            for (String format : mFormats) {
                buffer.append(' ').append(format);
            }
            buffer.append("\r\n");
            super.write(buffer);
        }
    }

    /**
     * This class acts as a set of fields, and the size of the set is expected
     * to be small. Therefore, it uses a simple list instead of maps. Each field
     * has three parts: a key, a delimiter, and a value. Delimiters are special
     * because they are not included in binary attributes. As a result, the
     * private methods, which are the building blocks of this class, all take
     * the delimiter as an argument.
     */
    private static class Fields {
        private final String mOrder;
        private final ArrayList<String> mLines = new ArrayList<String>();

        Fields(String order) {
            mOrder = order;
        }

        /**
         * Returns the connection address or {@code null} if it is not present.
         */
        public String getAddress() {
            String address = get("c", '=');
            if (address == null) {
                return null;
            }
            String[] parts = address.split(" ");
            if (parts.length != 3) {
                return null;
            }
            // Strip an optional "/<ttl>" or "/<count>" suffix.
            int slash = parts[2].indexOf('/');
            return (slash < 0) ? parts[2] : parts[2].substring(0, slash);
        }

        /**
         * Sets the connection address. The field will be removed if the address
         * is {@code null}.
         */
        public void setAddress(String address) {
            if (address != null) {
                address = (address.indexOf(':') < 0 ? "IN IP4 " : "IN IP6 ") +
                        address;
            }
            set("c", '=', address);
        }

        /**
         * Returns the encryption method or {@code null} if it is not present.
         */
        public String getEncryptionMethod() {
            String encryption = get("k", '=');
            if (encryption == null) {
                return null;
            }
            int colon = encryption.indexOf(':');
            return (colon == -1) ? encryption : encryption.substring(0, colon);
        }

        /**
         * Returns the encryption key or {@code null} if it is not present.
         */
        public String getEncryptionKey() {
            String encryption = get("k", '=');
            if (encryption == null) {
                return null;
            }
            int colon = encryption.indexOf(':');
            // Bug fix: return the key (the part after the colon), not the
            // method prefix including the colon, matching getEncryptionMethod()
            // which returns the part before the colon.
            return (colon == -1) ? null : encryption.substring(colon + 1);
        }

        /**
         * Sets the encryption method and the encryption key. The field will be
         * removed if the method is {@code null}.
         */
        public void setEncryption(String method, String key) {
            set("k", '=', (method == null || key == null) ?
                    method : method + ':' + key);
        }

        /**
         * Returns the types of the bandwidth limits.
         */
        public String[] getBandwidthTypes() {
            return cut("b=", ':');
        }

        /**
         * Returns the bandwidth limit of the given type or {@code -1} if it is
         * not present.
         */
        public int getBandwidth(String type) {
            String value = get("b=" + type, ':');
            if (value != null) {
                try {
                    return Integer.parseInt(value);
                } catch (NumberFormatException e) {
                    // Fall through and drop the malformed field below.
                }
                setBandwidth(type, -1);
            }
            return -1;
        }

        /**
         * Sets the bandwidth limit for the given type. The field will be
         * removed if the value is negative.
         */
        public void setBandwidth(String type, int value) {
            set("b=" + type, ':', (value < 0) ? null : String.valueOf(value));
        }

        /**
         * Returns the names of all the attributes.
         */
        public String[] getAttributeNames() {
            return cut("a=", ':');
        }

        /**
         * Returns the attribute of the given name or {@code null} if it is not
         * present.
         */
        public String getAttribute(String name) {
            return get("a=" + name, ':');
        }

        /**
         * Sets the attribute for the given name. The field will be removed if
         * the value is {@code null}. To set a binary attribute, use an empty
         * string as the value.
         */
        public void setAttribute(String name, String value) {
            set("a=" + name, ':', value);
        }

        private void write(StringBuilder buffer) {
            // Emit fields grouped by type, in the order given to the ctor.
            for (int i = 0; i < mOrder.length(); ++i) {
                char type = mOrder.charAt(i);
                for (String line : mLines) {
                    if (line.charAt(0) == type) {
                        buffer.append(line).append("\r\n");
                    }
                }
            }
        }

        /**
         * Invokes {@link #set} after splitting the line into three parts.
         */
        private void parse(String line) {
            char type = line.charAt(0);
            if (mOrder.indexOf(type) == -1) {
                return;
            }
            char delimiter = '=';
            if (line.startsWith("a=rtpmap:") || line.startsWith("a=fmtp:")) {
                delimiter = ' ';
            } else if (type == 'b' || type == 'a') {
                delimiter = ':';
            }
            int i = line.indexOf(delimiter);
            if (i == -1) {
                set(line, delimiter, "");
            } else {
                set(line.substring(0, i), delimiter, line.substring(i + 1));
            }
        }

        /**
         * Returns, for every key starting with the given prefix, the part of
         * the key after the prefix.
         */
        private String[] cut(String prefix, char delimiter) {
            String[] names = new String[mLines.size()];
            int length = 0;
            for (String line : mLines) {
                if (line.startsWith(prefix)) {
                    int i = line.indexOf(delimiter);
                    if (i == -1) {
                        i = line.length();
                    }
                    names[length] = line.substring(prefix.length(), i);
                    ++length;
                }
            }
            return Arrays.copyOf(names, length);
        }

        /**
         * Returns the index of the key, or {@code -1} if it is not present.
         */
        private int find(String key, char delimiter) {
            int length = key.length();
            for (int i = mLines.size() - 1; i >= 0; --i) {
                String line = mLines.get(i);
                if (line.startsWith(key) && (line.length() == length ||
                        line.charAt(length) == delimiter)) {
                    return i;
                }
            }
            return -1;
        }

        /**
         * Sets the key with the value or removes the key if the value is
         * {@code null}.
         */
        private void set(String key, char delimiter, String value) {
            int index = find(key, delimiter);
            if (value != null) {
                if (value.length() != 0) {
                    key = key + delimiter + value;
                }
                // Binary attributes (empty value) are stored without the
                // delimiter.
                if (index == -1) {
                    mLines.add(key);
                } else {
                    mLines.set(index, key);
                }
            } else if (index != -1) {
                mLines.remove(index);
            }
        }

        /**
         * Returns the value of the key, or {@code null} if it is not present.
         * Binary attributes yield an empty string.
         */
        private String get(String key, char delimiter) {
            int index = find(key, delimiter);
            if (index == -1) {
                return null;
            }
            String line = mLines.get(index);
            int length = key.length();
            return (line.length() == length) ? "" : line.substring(length + 1);
        }
    }
}

View File

@@ -16,8 +16,6 @@
package android.net.sip;
import gov.nist.javax.sdp.fields.SDPKeywords;
import android.content.Context;
import android.media.AudioManager;
import android.media.Ringtone;
@@ -28,6 +26,7 @@ import android.net.rtp.AudioCodec;
import android.net.rtp.AudioGroup;
import android.net.rtp.AudioStream;
import android.net.rtp.RtpStream;
import android.net.sip.SimpleSessionDescription.Media;
import android.net.wifi.WifiManager;
import android.os.Message;
import android.os.RemoteException;
@@ -38,15 +37,13 @@ import android.util.Log;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sdp.SdpException;
/**
* Class that handles an audio call over SIP.
* Class that handles an audio call over SIP.
*/
/** @hide */
public class SipAudioCallImpl extends SipSessionAdapter
@@ -54,20 +51,19 @@ public class SipAudioCallImpl extends SipSessionAdapter
private static final String TAG = SipAudioCallImpl.class.getSimpleName();
private static final boolean RELEASE_SOCKET = true;
private static final boolean DONT_RELEASE_SOCKET = false;
private static final String AUDIO = "audio";
private static final int DTMF = 101;
private static final int SESSION_TIMEOUT = 5; // in seconds
private Context mContext;
private SipProfile mLocalProfile;
private SipAudioCall.Listener mListener;
private ISipSession mSipSession;
private SdpSessionDescription mPeerSd;
private long mSessionId = System.currentTimeMillis();
private String mPeerSd;
private AudioStream mAudioStream;
private AudioGroup mAudioGroup;
private SdpSessionDescription.AudioCodec mCodec;
private long mSessionId = -1L; // SDP session ID
private boolean mInCall = false;
private boolean mMuted = false;
private boolean mHold = false;
@@ -146,7 +142,7 @@ public class SipAudioCallImpl extends SipSessionAdapter
mInCall = false;
mHold = false;
mSessionId = -1L;
mSessionId = System.currentTimeMillis();
mErrorCode = SipErrorCode.NO_ERROR;
mErrorMessage = null;
@@ -226,8 +222,8 @@ public class SipAudioCallImpl extends SipSessionAdapter
// session changing request
try {
mPeerSd = new SdpSessionDescription(sessionDescription);
answerCall(SESSION_TIMEOUT);
String answer = createAnswer(sessionDescription).encode();
mSipSession.answerCall(answer, SESSION_TIMEOUT);
} catch (Throwable e) {
Log.e(TAG, "onRinging()", e);
session.endCall();
@@ -242,12 +238,8 @@ public class SipAudioCallImpl extends SipSessionAdapter
String sessionDescription) {
stopRingbackTone();
stopRinging();
try {
mPeerSd = new SdpSessionDescription(sessionDescription);
Log.d(TAG, "sip call established: " + mPeerSd);
} catch (SdpException e) {
Log.e(TAG, "createSessionDescription()", e);
}
mPeerSd = sessionDescription;
Log.v(TAG, "onCallEstablished()" + mPeerSd);
Listener listener = mListener;
if (listener != null) {
@@ -332,10 +324,10 @@ public class SipAudioCallImpl extends SipSessionAdapter
public synchronized void attachCall(ISipSession session,
String sessionDescription) throws SipException {
mSipSession = session;
mPeerSd = sessionDescription;
Log.v(TAG, "attachCall()" + mPeerSd);
try {
mPeerSd = new SdpSessionDescription(sessionDescription);
session.setListener(this);
if (getState() == SipSessionState.INCOMING_CALL) startRinging();
} catch (Throwable e) {
Log.e(TAG, "attachCall()", e);
@@ -351,8 +343,8 @@ public class SipAudioCallImpl extends SipSessionAdapter
throw new SipException(
"Failed to create SipSession; network available?");
}
mSipSession.makeCall(peerProfile, createOfferSessionDescription(),
timeout);
mAudioStream = new AudioStream(InetAddress.getByName(getLocalIp()));
mSipSession.makeCall(peerProfile, createOffer().encode(), timeout);
} catch (Throwable e) {
if (e instanceof SipException) {
throw (SipException) e;
@@ -365,7 +357,7 @@ public class SipAudioCallImpl extends SipSessionAdapter
public synchronized void endCall() throws SipException {
try {
stopRinging();
stopCall(true);
stopCall(RELEASE_SOCKET);
mInCall = false;
// perform the above local ops first and then network op
@@ -375,123 +367,131 @@ public class SipAudioCallImpl extends SipSessionAdapter
}
}
public synchronized void holdCall(int timeout) throws SipException {
if (mHold) return;
try {
mSipSession.changeCall(createHoldSessionDescription(), timeout);
mHold = true;
} catch (Throwable e) {
throwSipException(e);
}
AudioGroup audioGroup = getAudioGroup();
if (audioGroup != null) audioGroup.setMode(AudioGroup.MODE_ON_HOLD);
}
public synchronized void answerCall(int timeout) throws SipException {
try {
stopRinging();
mSipSession.answerCall(createAnswerSessionDescription(), timeout);
mAudioStream = new AudioStream(InetAddress.getByName(getLocalIp()));
mSipSession.answerCall(createAnswer(mPeerSd).encode(), timeout);
} catch (Throwable e) {
Log.e(TAG, "answerCall()", e);
throwSipException(e);
}
}
public synchronized void continueCall(int timeout) throws SipException {
if (!mHold) return;
public synchronized void holdCall(int timeout) throws SipException {
if (mHold) return;
try {
mHold = false;
mSipSession.changeCall(createContinueSessionDescription(), timeout);
mSipSession.changeCall(createHoldOffer().encode(), timeout);
} catch (Throwable e) {
throwSipException(e);
}
mHold = true;
AudioGroup audioGroup = getAudioGroup();
if (audioGroup != null) audioGroup.setMode(AudioGroup.MODE_ON_HOLD);
}
public synchronized void continueCall(int timeout) throws SipException {
if (!mHold) return;
try {
mSipSession.changeCall(createContinueOffer().encode(), timeout);
} catch (Throwable e) {
throwSipException(e);
}
mHold = false;
AudioGroup audioGroup = getAudioGroup();
if (audioGroup != null) audioGroup.setMode(AudioGroup.MODE_NORMAL);
}
private String createOfferSessionDescription() {
AudioCodec[] codecs = AudioCodec.getSystemSupportedCodecs();
return createSdpBuilder(true, convert(codecs)).build();
}
private String createAnswerSessionDescription() {
try {
// choose an acceptable media from mPeerSd to answer
SdpSessionDescription.AudioCodec codec = getCodec(mPeerSd);
SdpSessionDescription.Builder sdpBuilder =
createSdpBuilder(false, codec);
if (mPeerSd.isSendOnly(AUDIO)) {
sdpBuilder.addMediaAttribute(AUDIO, "recvonly", (String) null);
} else if (mPeerSd.isReceiveOnly(AUDIO)) {
sdpBuilder.addMediaAttribute(AUDIO, "sendonly", (String) null);
}
return sdpBuilder.build();
} catch (SdpException e) {
throw new RuntimeException(e);
private SimpleSessionDescription createOffer() {
SimpleSessionDescription offer =
new SimpleSessionDescription(mSessionId, getLocalIp());
AudioCodec[] codecs = AudioCodec.getCodecs();
Media media = offer.newMedia(
"audio", mAudioStream.getLocalPort(), 1, "RTP/AVP");
for (AudioCodec codec : AudioCodec.getCodecs()) {
media.setRtpPayload(codec.type, codec.rtpmap, codec.fmtp);
}
media.setRtpPayload(127, "telephone-event/8000", "0-15");
return offer;
}
private String createHoldSessionDescription() {
try {
return createSdpBuilder(false, mCodec)
.addMediaAttribute(AUDIO, "sendonly", (String) null)
.build();
} catch (SdpException e) {
throw new RuntimeException(e);
}
}
private SimpleSessionDescription createAnswer(String offerSd) {
SimpleSessionDescription offer =
new SimpleSessionDescription(offerSd);
SimpleSessionDescription answer =
new SimpleSessionDescription(mSessionId, getLocalIp());
AudioCodec codec = null;
for (Media media : offer.getMedia()) {
if ((codec == null) && (media.getPort() > 0)
&& "audio".equals(media.getType())
&& "RTP/AVP".equals(media.getProtocol())) {
// Find the first audio codec we supported.
for (int type : media.getRtpPayloadTypes()) {
codec = AudioCodec.getCodec(type, media.getRtpmap(type),
media.getFmtp(type));
if (codec != null) {
break;
}
}
if (codec != null) {
Media reply = answer.newMedia(
"audio", mAudioStream.getLocalPort(), 1, "RTP/AVP");
reply.setRtpPayload(codec.type, codec.rtpmap, codec.fmtp);
private String createContinueSessionDescription() {
return createSdpBuilder(true, mCodec).build();
}
// Check if DTMF is supported in the same media.
for (int type : media.getRtpPayloadTypes()) {
String rtpmap = media.getRtpmap(type);
if ((type != codec.type) && (rtpmap != null)
&& rtpmap.startsWith("telephone-event")) {
reply.setRtpPayload(
type, rtpmap, media.getFmtp(type));
}
}
private String getMediaDescription(SdpSessionDescription.AudioCodec codec) {
return String.format("%d %s/%d", codec.payloadType, codec.name,
codec.sampleRate);
}
private long getSessionId() {
if (mSessionId < 0) {
mSessionId = System.currentTimeMillis();
}
return mSessionId;
}
private SdpSessionDescription.Builder createSdpBuilder(
boolean addTelephoneEvent,
SdpSessionDescription.AudioCodec... codecs) {
String localIp = getLocalIp();
SdpSessionDescription.Builder sdpBuilder;
try {
long sessionVersion = System.currentTimeMillis();
sdpBuilder = new SdpSessionDescription.Builder("SIP Call")
.setOrigin(mLocalProfile, getSessionId(), sessionVersion,
SDPKeywords.IN, SDPKeywords.IPV4, localIp)
.setConnectionInfo(SDPKeywords.IN, SDPKeywords.IPV4,
localIp);
List<Integer> codecIds = new ArrayList<Integer>();
for (SdpSessionDescription.AudioCodec codec : codecs) {
codecIds.add(codec.payloadType);
// Handle recvonly and sendonly.
if (media.getAttribute("recvonly") != null) {
answer.setAttribute("sendonly", "");
} else if(media.getAttribute("sendonly") != null) {
answer.setAttribute("recvonly", "");
} else if(offer.getAttribute("recvonly") != null) {
answer.setAttribute("sendonly", "");
} else if(offer.getAttribute("sendonly") != null) {
answer.setAttribute("recvonly", "");
}
continue;
}
}
if (addTelephoneEvent) codecIds.add(DTMF);
sdpBuilder.addMedia(AUDIO, getLocalMediaPort(), 1, "RTP/AVP",
codecIds.toArray(new Integer[codecIds.size()]));
for (SdpSessionDescription.AudioCodec codec : codecs) {
sdpBuilder.addMediaAttribute(AUDIO, "rtpmap",
getMediaDescription(codec));
// Reject the media.
Media reply = answer.newMedia(
media.getType(), 0, 1, media.getProtocol());
for (String format : media.getFormats()) {
reply.setFormat(format, null);
}
if (addTelephoneEvent) {
sdpBuilder.addMediaAttribute(AUDIO, "rtpmap",
DTMF + " telephone-event/8000");
}
// FIXME: deal with vbr codec
sdpBuilder.addMediaAttribute(AUDIO, "ptime", "20");
} catch (SdpException e) {
throw new RuntimeException(e);
}
return sdpBuilder;
if (codec == null) {
throw new IllegalStateException("Reject SDP: no suitable codecs");
}
return answer;
}
private SimpleSessionDescription createHoldOffer() {
SimpleSessionDescription offer = createContinueOffer();
offer.setAttribute("sendonly", "");
return offer;
}
private SimpleSessionDescription createContinueOffer() {
SimpleSessionDescription offer =
new SimpleSessionDescription(mSessionId, getLocalIp());
Media media = offer.newMedia(
"audio", mAudioStream.getLocalPort(), 1, "RTP/AVP");
AudioCodec codec = mAudioStream.getCodec();
media.setRtpPayload(codec.type, codec.rtpmap, codec.fmtp);
int dtmfType = mAudioStream.getDtmfType();
if (dtmfType != -1) {
media.setRtpPayload(dtmfType, "telephone-event/8000", "0-15");
}
return offer;
}
public synchronized void toggleMute() {
@@ -532,49 +532,16 @@ public class SipAudioCallImpl extends SipSessionAdapter
public synchronized AudioGroup getAudioGroup() {
if (mAudioGroup != null) return mAudioGroup;
return ((mAudioStream == null) ? null : mAudioStream.getAudioGroup());
return ((mAudioStream == null) ? null : mAudioStream.getGroup());
}
public synchronized void setAudioGroup(AudioGroup group) {
if ((mAudioStream != null) && (mAudioStream.getAudioGroup() != null)) {
if ((mAudioStream != null) && (mAudioStream.getGroup() != null)) {
mAudioStream.join(group);
}
mAudioGroup = group;
}
private SdpSessionDescription.AudioCodec getCodec(SdpSessionDescription sd) {
HashMap<String, AudioCodec> acceptableCodecs =
new HashMap<String, AudioCodec>();
for (AudioCodec codec : AudioCodec.getSystemSupportedCodecs()) {
acceptableCodecs.put(codec.name, codec);
}
for (SdpSessionDescription.AudioCodec codec : sd.getAudioCodecs()) {
AudioCodec matchedCodec = acceptableCodecs.get(codec.name);
if (matchedCodec != null) return codec;
}
Log.w(TAG, "no common codec is found, use PCM/0");
return convert(AudioCodec.ULAW);
}
private AudioCodec convert(SdpSessionDescription.AudioCodec codec) {
AudioCodec c = AudioCodec.getSystemSupportedCodec(codec.name);
return ((c == null) ? AudioCodec.ULAW : c);
}
private SdpSessionDescription.AudioCodec convert(AudioCodec codec) {
return new SdpSessionDescription.AudioCodec(codec.defaultType,
codec.name, codec.sampleRate, codec.sampleCount);
}
private SdpSessionDescription.AudioCodec[] convert(AudioCodec[] codecs) {
SdpSessionDescription.AudioCodec[] copies =
new SdpSessionDescription.AudioCodec[codecs.length];
for (int i = 0, len = codecs.length; i < len; i++) {
copies[i] = convert(codecs[i]);
}
return copies;
}
public void startAudio() {
try {
startAudioInternal();
@@ -588,41 +555,75 @@ public class SipAudioCallImpl extends SipSessionAdapter
}
private synchronized void startAudioInternal() throws UnknownHostException {
if (mPeerSd == null) {
Log.v(TAG, "startAudioInternal() mPeerSd = null");
throw new IllegalStateException("mPeerSd = null");
}
stopCall(DONT_RELEASE_SOCKET);
mInCall = true;
SdpSessionDescription peerSd = mPeerSd;
String peerMediaAddress = peerSd.getPeerMediaAddress(AUDIO);
// TODO: handle multiple media fields
int peerMediaPort = peerSd.getPeerMediaPort(AUDIO);
Log.i(TAG, "start audiocall " + peerMediaAddress + ":" + peerMediaPort);
int localPort = getLocalMediaPort();
int sampleRate = 8000;
int frameSize = sampleRate / 50; // 160
// Run exact the same logic in createAnswer() to setup mAudioStream.
SimpleSessionDescription offer =
new SimpleSessionDescription(mPeerSd);
AudioStream stream = mAudioStream;
AudioCodec codec = null;
for (Media media : offer.getMedia()) {
if ((codec == null) && (media.getPort() > 0)
&& "audio".equals(media.getType())
&& "RTP/AVP".equals(media.getProtocol())) {
// Find the first audio codec we supported.
for (int type : media.getRtpPayloadTypes()) {
codec = AudioCodec.getCodec(
type, media.getRtpmap(type), media.getFmtp(type));
if (codec != null) {
break;
}
}
// TODO: get sample rate from sdp
mCodec = getCodec(peerSd);
if (codec != null) {
// Associate with the remote host.
String address = media.getAddress();
if (address == null) {
address = offer.getAddress();
}
stream.associate(InetAddress.getByName(address),
media.getPort());
AudioStream audioStream = mAudioStream;
audioStream.associate(InetAddress.getByName(peerMediaAddress),
peerMediaPort);
audioStream.setCodec(convert(mCodec), mCodec.payloadType);
audioStream.setDtmfType(DTMF);
Log.d(TAG, "start media: localPort=" + localPort + ", peer="
+ peerMediaAddress + ":" + peerMediaPort);
stream.setDtmfType(-1);
stream.setCodec(codec);
// Check if DTMF is supported in the same media.
for (int type : media.getRtpPayloadTypes()) {
String rtpmap = media.getRtpmap(type);
if ((type != codec.type) && (rtpmap != null)
&& rtpmap.startsWith("telephone-event")) {
stream.setDtmfType(type);
}
}
// Handle recvonly and sendonly.
if (mHold) {
stream.setMode(RtpStream.MODE_NORMAL);
} else if (media.getAttribute("recvonly") != null) {
stream.setMode(RtpStream.MODE_SEND_ONLY);
} else if(media.getAttribute("sendonly") != null) {
stream.setMode(RtpStream.MODE_RECEIVE_ONLY);
} else if(offer.getAttribute("recvonly") != null) {
stream.setMode(RtpStream.MODE_SEND_ONLY);
} else if(offer.getAttribute("sendonly") != null) {
stream.setMode(RtpStream.MODE_RECEIVE_ONLY);
} else {
stream.setMode(RtpStream.MODE_NORMAL);
}
break;
}
}
}
if (codec == null) {
throw new IllegalStateException("Reject SDP: no suitable codecs");
}
audioStream.setMode(RtpStream.MODE_NORMAL);
if (!mHold) {
// FIXME: won't work if peer is not sending nor receiving
if (!peerSd.isSending(AUDIO)) {
Log.d(TAG, " not receiving");
audioStream.setMode(RtpStream.MODE_SEND_ONLY);
}
if (!peerSd.isReceiving(AUDIO)) {
Log.d(TAG, " not sending");
audioStream.setMode(RtpStream.MODE_RECEIVE_ONLY);
}
/* The recorder volume will be very low if the device is in
* IN_CALL mode. Therefore, we have to set the mode to NORMAL
* in order to have the normal microphone level.
@@ -642,7 +643,7 @@ public class SipAudioCallImpl extends SipSessionAdapter
// there's another AudioGroup out there that's active
} else {
if (audioGroup == null) audioGroup = new AudioGroup();
audioStream.join(audioGroup);
mAudioStream.join(audioGroup);
if (mMuted) {
audioGroup.setMode(AudioGroup.MODE_MUTED);
} else {
@@ -663,24 +664,11 @@ public class SipAudioCallImpl extends SipSessionAdapter
}
}
private int getLocalMediaPort() {
if (mAudioStream != null) return mAudioStream.getLocalPort();
try {
AudioStream s = mAudioStream =
new AudioStream(InetAddress.getByName(getLocalIp()));
return s.getLocalPort();
} catch (IOException e) {
Log.w(TAG, "getLocalMediaPort(): " + e);
throw new RuntimeException(e);
}
}
private String getLocalIp() {
try {
return mSipSession.getLocalIp();
} catch (RemoteException e) {
// FIXME
return "127.0.0.1";
throw new IllegalStateException(e);
}
}

View File

@@ -36,9 +36,9 @@ int8_t gExponents[128] = {
class UlawCodec : public AudioCodec
{
public:
bool set(int sampleRate, int sampleCount) {
mSampleCount = sampleCount;
return sampleCount > 0;
int set(int sampleRate, const char *fmtp) {
mSampleCount = sampleRate / 50;
return mSampleCount;
}
int encode(void *payload, int16_t *samples);
int decode(int16_t *samples, void *payload, int length);
@@ -89,9 +89,9 @@ AudioCodec *newUlawCodec()
class AlawCodec : public AudioCodec
{
public:
bool set(int sampleRate, int sampleCount) {
mSampleCount = sampleCount;
return sampleCount > 0;
int set(int sampleRate, const char *fmtp) {
mSampleCount = sampleRate / 50;
return mSampleCount;
}
int encode(void *payload, int16_t *samples);
int decode(int16_t *samples, void *payload, int length);
@@ -152,8 +152,10 @@ AudioCodec *newAudioCodec(const char *codecName)
{
AudioCodecType *type = gAudioCodecTypes;
while (type->name != NULL) {
if (strcmp(codecName, type->name) == 0) {
return type->create();
if (strcasecmp(codecName, type->name) == 0) {
AudioCodec *codec = type->create();
codec->name = type->name;
return codec;
}
++type;
}

View File

@@ -22,9 +22,11 @@
class AudioCodec
{
public:
const char *name;
// Needed by destruction through base class pointers.
virtual ~AudioCodec() {}
// Returns true if initialization succeeds.
virtual bool set(int sampleRate, int sampleCount) = 0;
// Returns sampleCount or non-positive value if unsupported.
virtual int set(int sampleRate, const char *fmtp) = 0;
// Returns the length of payload in bytes.
virtual int encode(void *payload, int16_t *samples) = 0;
// Returns the number of decoded samples.

View File

@@ -77,7 +77,7 @@ public:
AudioStream();
~AudioStream();
bool set(int mode, int socket, sockaddr_storage *remote,
const char *codecName, int sampleRate, int sampleCount,
AudioCodec *codec, int sampleRate, int sampleCount,
int codecType, int dtmfType);
void sendDtmf(int event);
@@ -104,6 +104,7 @@ private:
int mSampleRate;
int mSampleCount;
int mInterval;
int mLogThrottle;
int16_t *mBuffer;
int mBufferMask;
@@ -140,7 +141,7 @@ AudioStream::~AudioStream()
}
bool AudioStream::set(int mode, int socket, sockaddr_storage *remote,
const char *codecName, int sampleRate, int sampleCount,
AudioCodec *codec, int sampleRate, int sampleCount,
int codecType, int dtmfType)
{
if (mode < 0 || mode > LAST_MODE) {
@@ -148,14 +149,6 @@ bool AudioStream::set(int mode, int socket, sockaddr_storage *remote,
}
mMode = mode;
if (codecName) {
mRemote = *remote;
mCodec = newAudioCodec(codecName);
if (!mCodec || !mCodec->set(sampleRate, sampleCount)) {
return false;
}
}
mCodecMagic = (0x8000 | codecType) << 16;
mDtmfMagic = (dtmfType == -1) ? 0 : (0x8000 | dtmfType) << 16;
@@ -181,11 +174,15 @@ bool AudioStream::set(int mode, int socket, sockaddr_storage *remote,
mDtmfEvent = -1;
mDtmfStart = 0;
// Only take over the socket when succeeded.
// Only take over these things when succeeded.
mSocket = socket;
if (codec) {
mRemote = *remote;
mCodec = codec;
}
LOGD("stream[%d] is configured as %s %dkHz %dms", mSocket,
(codecName ? codecName : "RAW"), mSampleRate, mInterval);
(codec ? codec->name : "RAW"), mSampleRate, mInterval);
return true;
}
@@ -282,7 +279,10 @@ void AudioStream::encode(int tick, AudioStream *chain)
chain = chain->mNext;
}
if (!mixed) {
LOGD("stream[%d] no data", mSocket);
if ((mTick ^ mLogThrottle) >> 10) {
mLogThrottle = mTick;
LOGD("stream[%d] no data", mSocket);
}
return;
}
@@ -831,10 +831,9 @@ static jfieldID gMode;
void add(JNIEnv *env, jobject thiz, jint mode,
jint socket, jstring jRemoteAddress, jint remotePort,
jstring jCodecName, jint sampleRate, jint sampleCount,
jint codecType, jint dtmfType)
jstring jCodecSpec, jint dtmfType)
{
const char *codecName = NULL;
AudioCodec *codec = NULL;
AudioStream *stream = NULL;
AudioGroup *group = NULL;
@@ -842,33 +841,42 @@ void add(JNIEnv *env, jobject thiz, jint mode,
sockaddr_storage remote;
if (parse(env, jRemoteAddress, remotePort, &remote) < 0) {
// Exception already thrown.
goto error;
return;
}
if (sampleRate < 0 || sampleCount < 0 || codecType < 0 || codecType > 127) {
jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
goto error;
if (!jCodecSpec) {
jniThrowNullPointerException(env, "codecSpec");
return;
}
if (!jCodecName) {
jniThrowNullPointerException(env, "codecName");
goto error;
}
codecName = env->GetStringUTFChars(jCodecName, NULL);
if (!codecName) {
const char *codecSpec = env->GetStringUTFChars(jCodecSpec, NULL);
if (!codecSpec) {
// Exception already thrown.
return;
}
// Create audio codec.
int codecType = -1;
char codecName[16];
int sampleRate = -1;
sscanf(codecSpec, "%d %[^/]%*c%d", &codecType, codecName, &sampleRate);
codec = newAudioCodec(codecName);
int sampleCount = (codec ? codec->set(sampleRate, codecSpec) : -1);
env->ReleaseStringUTFChars(jCodecSpec, codecSpec);
if (sampleCount <= 0) {
jniThrowException(env, "java/lang/IllegalStateException",
"cannot initialize audio codec");
goto error;
}
// Create audio stream.
stream = new AudioStream;
if (!stream->set(mode, socket, &remote, codecName, sampleRate, sampleCount,
if (!stream->set(mode, socket, &remote, codec, sampleRate, sampleCount,
codecType, dtmfType)) {
jniThrowException(env, "java/lang/IllegalStateException",
"cannot initialize audio stream");
env->ReleaseStringUTFChars(jCodecName, codecName);
goto error;
}
env->ReleaseStringUTFChars(jCodecName, codecName);
socket = -1;
codec = NULL;
// Create audio group.
group = (AudioGroup *)env->GetIntField(thiz, gNative);
@@ -896,6 +904,7 @@ void add(JNIEnv *env, jobject thiz, jint mode,
error:
delete group;
delete stream;
delete codec;
close(socket);
env->SetIntField(thiz, gNative, NULL);
}
@@ -930,7 +939,7 @@ void sendDtmf(JNIEnv *env, jobject thiz, jint event)
}
JNINativeMethod gMethods[] = {
{"add", "(IILjava/lang/String;ILjava/lang/String;IIII)V", (void *)add},
{"add", "(IILjava/lang/String;ILjava/lang/String;I)V", (void *)add},
{"remove", "(I)V", (void *)remove},
{"setMode", "(I)V", (void *)setMode},
{"sendDtmf", "(I)V", (void *)sendDtmf},